tree-wide: parallel: Fully removed all Lrc, replaced with Arc
parent 613bdd4997
commit 0a21f1d0a2

77 changed files with 405 additions and 395 deletions
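The change is mechanical: `Lrc` was only a re-export of `std::sync::Arc` from `rustc_data_structures::sync` (the `pub use std::sync::{Arc as Lrc, OnceLock, Weak};` line removed below), so every `Lrc<T>`, `Lrc::new`, `Lrc::clone`, and `Lrc::make_mut` becomes the corresponding `Arc` item, and each touched file gains a direct `use std::sync::Arc;` import. A minimal sketch of the pattern follows; the function names are illustrative and not taken from the diff:

use std::sync::Arc;

// Before: `use rustc_data_structures::sync::Lrc;` with `Lrc<[u8]>`, `Lrc::clone`, ...
// After: the standard-library type is named directly.
fn share_bytes(bytes: &Arc<[u8]>) -> Arc<[u8]> {
    // Same cheap refcount bump that `Lrc::clone` performed, since `Lrc` was an `Arc` alias.
    Arc::clone(bytes)
}

fn push_token(tokens: &mut Arc<Vec<u32>>, tok: u32) {
    // `Arc::make_mut` clones the payload only when other references exist,
    // which is how the `Lrc::make_mut` call sites in the token streams behave.
    Arc::make_mut(tokens).push(tok);
}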
@@ -19,6 +19,7 @@
 //! - [`UnOp`], [`BinOp`], and [`BinOpKind`]: Unary and binary operators.
 
 use std::borrow::Cow;
+use std::sync::Arc;
 use std::{cmp, fmt};
 
 pub use GenericArgs::*;
@@ -27,7 +28,6 @@ pub use rustc_ast_ir::{Movability, Mutability, Pinnedness};
 use rustc_data_structures::packed::Pu128;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::tagged_ptr::Tag;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 pub use rustc_span::AttrId;
@@ -1611,7 +1611,7 @@ pub enum ExprKind {
     /// Added for optimization purposes to avoid the need to escape
     /// large binary blobs - should always behave like [`ExprKind::Lit`]
     /// with a `ByteStr` literal.
-    IncludedBytes(Lrc<[u8]>),
+    IncludedBytes(Arc<[u8]>),
 
     /// A `format_args!()` expression.
     FormatArgs(P<FormatArgs>),
@@ -1904,9 +1904,9 @@ pub enum LitKind {
     Str(Symbol, StrStyle),
     /// A byte string (`b"foo"`). Not stored as a symbol because it might be
     /// non-utf8, and symbols only allow utf8 strings.
-    ByteStr(Lrc<[u8]>, StrStyle),
+    ByteStr(Arc<[u8]>, StrStyle),
     /// A C String (`c"foo"`). Guaranteed to only have `\0` at the end.
-    CStr(Lrc<[u8]>, StrStyle),
+    CStr(Arc<[u8]>, StrStyle),
     /// A byte char (`b'f'`).
     Byte(u8),
     /// A character literal (`'a'`).
@@ -9,10 +9,10 @@
 
 use std::ops::DerefMut;
 use std::panic;
+use std::sync::Arc;
 
 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
 use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_data_structures::sync::Lrc;
 use rustc_span::source_map::Spanned;
 use rustc_span::{Ident, Span};
 use smallvec::{Array, SmallVec, smallvec};
@@ -789,14 +789,14 @@ fn visit_tt<T: MutVisitor>(vis: &mut T, tt: &mut TokenTree) {
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 fn visit_tts<T: MutVisitor>(vis: &mut T, TokenStream(tts): &mut TokenStream) {
     if T::VISIT_TOKENS && !tts.is_empty() {
-        let tts = Lrc::make_mut(tts);
+        let tts = Arc::make_mut(tts);
         visit_vec(tts, |tree| visit_tt(vis, tree));
     }
 }
 
 fn visit_attr_tts<T: MutVisitor>(vis: &mut T, AttrTokenStream(tts): &mut AttrTokenStream) {
     if T::VISIT_TOKENS && !tts.is_empty() {
-        let tts = Lrc::make_mut(tts);
+        let tts = Arc::make_mut(tts);
         visit_vec(tts, |tree| visit_attr_tt(vis, tree));
     }
 }
@@ -836,7 +836,7 @@ pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
             vis.visit_ident(ident);
         }
         token::Interpolated(nt) => {
-            let nt = Lrc::make_mut(nt);
+            let nt = Arc::make_mut(nt);
             visit_nonterminal(vis, nt);
         }
         _ => {}
@@ -1,5 +1,6 @@
 use std::borrow::Cow;
 use std::fmt;
+use std::sync::Arc;
 
 pub use BinOpToken::*;
 pub use LitKind::*;
@@ -8,7 +9,6 @@ pub use NtExprKind::*;
 pub use NtPatKind::*;
 pub use TokenKind::*;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::Lrc;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::edition::Edition;
 use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
@@ -451,7 +451,7 @@ pub enum TokenKind {
     /// The span in the surrounding `Token` is that of the metavariable in the
     /// macro's RHS. The span within the Nonterminal is that of the fragment
     /// passed to the macro at the call site.
-    Interpolated(Lrc<Nonterminal>),
+    Interpolated(Arc<Nonterminal>),
 
     /// A doc comment token.
     /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
@@ -469,7 +469,7 @@ impl Clone for TokenKind {
         // a copy. This is faster than the `derive(Clone)` version which has a
         // separate path for every variant.
         match self {
-            Interpolated(nt) => Interpolated(Lrc::clone(nt)),
+            Interpolated(nt) => Interpolated(Arc::clone(nt)),
             _ => unsafe { std::ptr::read(self) },
         }
     }
@@ -14,10 +14,11 @@
 //! ownership of the original.
 
 use std::borrow::Cow;
+use std::sync::Arc;
 use std::{cmp, fmt, iter};
 
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::{self, Lrc};
+use rustc_data_structures::sync;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
@@ -119,11 +120,11 @@ impl ToAttrTokenStream for AttrTokenStream {
 /// of an actual `TokenStream` until it is needed.
 /// `Box` is here only to reduce the structure size.
 #[derive(Clone)]
-pub struct LazyAttrTokenStream(Lrc<Box<dyn ToAttrTokenStream>>);
+pub struct LazyAttrTokenStream(Arc<Box<dyn ToAttrTokenStream>>);
 
 impl LazyAttrTokenStream {
     pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
-        LazyAttrTokenStream(Lrc::new(Box::new(inner)))
+        LazyAttrTokenStream(Arc::new(Box::new(inner)))
     }
 
     pub fn to_attr_token_stream(&self) -> AttrTokenStream {
@@ -160,7 +161,7 @@ impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
 /// during expansion to perform early cfg-expansion, and to process attributes
 /// during proc-macro invocations.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);
+pub struct AttrTokenStream(pub Arc<Vec<AttrTokenTree>>);
 
 /// Like `TokenTree`, but for `AttrTokenStream`.
 #[derive(Clone, Debug, Encodable, Decodable)]
@@ -175,7 +176,7 @@ pub enum AttrTokenTree {
 
 impl AttrTokenStream {
     pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
-        AttrTokenStream(Lrc::new(tokens))
+        AttrTokenStream(Arc::new(tokens))
     }
 
     /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`. During
@@ -293,7 +294,7 @@ pub struct AttrsTarget {
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
 /// backwards compatibility.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
+pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
 
 /// Indicates whether a token can join with the following token to form a
 /// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
@@ -412,7 +413,7 @@ impl PartialEq<TokenStream> for TokenStream {
 
 impl TokenStream {
     pub fn new(tts: Vec<TokenTree>) -> TokenStream {
-        TokenStream(Lrc::new(tts))
+        TokenStream(Arc::new(tts))
     }
 
     pub fn is_empty(&self) -> bool {
@@ -544,7 +545,7 @@ impl TokenStream {
     /// Push `tt` onto the end of the stream, possibly gluing it to the last
     /// token. Uses `make_mut` to maximize efficiency.
     pub fn push_tree(&mut self, tt: TokenTree) {
-        let vec_mut = Lrc::make_mut(&mut self.0);
+        let vec_mut = Arc::make_mut(&mut self.0);
 
         if Self::try_glue_to_last(vec_mut, &tt) {
             // nothing else to do
@@ -557,7 +558,7 @@ impl TokenStream {
     /// token tree to the last token. (No other token trees will be glued.)
     /// Uses `make_mut` to maximize efficiency.
     pub fn push_stream(&mut self, stream: TokenStream) {
-        let vec_mut = Lrc::make_mut(&mut self.0);
+        let vec_mut = Arc::make_mut(&mut self.0);
 
         let stream_iter = stream.0.iter().cloned();
 
@@ -577,7 +578,7 @@ impl TokenStream {
     }
 
     /// Desugar doc comments like `/// foo` in the stream into `#[doc =
-    /// r"foo"]`. Modifies the `TokenStream` via `Lrc::make_mut`, but as little
+    /// r"foo"]`. Modifies the `TokenStream` via `Arc::make_mut`, but as little
     /// as possible.
     pub fn desugar_doc_comments(&mut self) {
         if let Some(desugared_stream) = desugar_inner(self.clone()) {
@@ -596,7 +597,7 @@ impl TokenStream {
                     ) => {
                         let desugared = desugared_tts(attr_style, data, span);
                         let desugared_len = desugared.len();
-                        Lrc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
+                        Arc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
                         modified = true;
                         i += desugared_len;
                     }
@@ -607,7 +608,7 @@ impl TokenStream {
                         if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
                             let new_tt =
                                 TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
-                            Lrc::make_mut(&mut stream.0)[i] = new_tt;
+                            Arc::make_mut(&mut stream.0)[i] = new_tt;
                             modified = true;
                         }
                         i += 1;
@@ -121,7 +121,7 @@ impl LitKind {
             }
             token::ByteStrRaw(n) => {
                 // Raw strings have no escapes so we can convert the symbol
-                // directly to a `Lrc<u8>`.
+                // directly to a `Arc<u8>`.
                 let buf = symbol.as_str().to_owned().into_bytes();
                 LitKind::ByteStr(buf.into(), StrStyle::Raw(n))
             }
@@ -142,7 +142,7 @@ impl LitKind {
             }
             token::CStrRaw(n) => {
                 // Raw strings have no escapes so we can convert the symbol
-                // directly to a `Lrc<u8>` after appending the terminating NUL
+                // directly to a `Arc<u8>` after appending the terminating NUL
                 // char.
                 let mut buf = symbol.as_str().to_owned().into_bytes();
                 buf.push(0);
@@ -1,11 +1,11 @@
 use std::assert_matches::assert_matches;
 use std::ops::ControlFlow;
+use std::sync::Arc;
 
 use rustc_ast::ptr::P as AstP;
 use rustc_ast::*;
 use rustc_ast_pretty::pprust::expr_to_string;
 use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_data_structures::sync::Lrc;
 use rustc_hir as hir;
 use rustc_hir::HirId;
 use rustc_hir::def::{DefKind, Res};
@@ -147,7 +147,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 ExprKind::IncludedBytes(bytes) => {
                     let lit = self.arena.alloc(respan(
                         self.lower_span(e.span),
-                        LitKind::ByteStr(Lrc::clone(bytes), StrStyle::Cooked),
+                        LitKind::ByteStr(Arc::clone(bytes), StrStyle::Cooked),
                     ));
                     hir::ExprKind::Lit(lit)
                 }
@@ -602,7 +602,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
                     this.mark_span_with_reason(
                         DesugaringKind::TryBlock,
                         expr.span,
-                        Some(Lrc::clone(&this.allow_try_trait)),
+                        Some(Arc::clone(&this.allow_try_trait)),
                     ),
                     expr,
                 )
@@ -610,7 +610,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 let try_span = this.mark_span_with_reason(
                     DesugaringKind::TryBlock,
                     this.tcx.sess.source_map().end_point(body.span),
-                    Some(Lrc::clone(&this.allow_try_trait)),
+                    Some(Arc::clone(&this.allow_try_trait)),
                 );
 
                 (try_span, this.expr_unit(try_span))
@@ -719,7 +719,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         let unstable_span = self.mark_span_with_reason(
             DesugaringKind::Async,
             self.lower_span(span),
-            Some(Lrc::clone(&self.allow_gen_future)),
+            Some(Arc::clone(&self.allow_gen_future)),
         );
         let resume_ty =
             self.make_lang_item_qpath(hir::LangItem::ResumeTy, unstable_span, None);
@@ -803,7 +803,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         let unstable_span = self.mark_span_with_reason(
             DesugaringKind::Async,
             span,
-            Some(Lrc::clone(&self.allow_gen_future)),
+            Some(Arc::clone(&self.allow_gen_future)),
         );
         self.lower_attrs(inner_hir_id, &[Attribute {
             kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new(
@@ -879,13 +879,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
 
         let features = match await_kind {
             FutureKind::Future => None,
-            FutureKind::AsyncIterator => Some(Lrc::clone(&self.allow_for_await)),
+            FutureKind::AsyncIterator => Some(Arc::clone(&self.allow_for_await)),
         };
         let span = self.mark_span_with_reason(DesugaringKind::Await, await_kw_span, features);
         let gen_future_span = self.mark_span_with_reason(
             DesugaringKind::Await,
             full_span,
-            Some(Lrc::clone(&self.allow_gen_future)),
+            Some(Arc::clone(&self.allow_gen_future)),
         );
         let expr_hir_id = expr.hir_id;
 
@@ -1905,13 +1905,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
         let unstable_span = self.mark_span_with_reason(
             DesugaringKind::QuestionMark,
             span,
-            Some(Lrc::clone(&self.allow_try_trait)),
+            Some(Arc::clone(&self.allow_try_trait)),
         );
         let try_span = self.tcx.sess.source_map().end_point(span);
         let try_span = self.mark_span_with_reason(
             DesugaringKind::QuestionMark,
             try_span,
-            Some(Lrc::clone(&self.allow_try_trait)),
+            Some(Arc::clone(&self.allow_try_trait)),
         );
 
         // `Try::branch(<expr>)`
@@ -2005,7 +2005,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         let unstable_span = self.mark_span_with_reason(
             DesugaringKind::YeetExpr,
             span,
-            Some(Lrc::clone(&self.allow_try_trait)),
+            Some(Arc::clone(&self.allow_try_trait)),
         );
 
         let from_yeet_expr = self.wrap_in_try_constructor(
@@ -41,13 +41,14 @@
 #![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
+use std::sync::Arc;
+
 use rustc_ast::node_id::NodeMap;
 use rustc_ast::{self as ast, *};
 use rustc_data_structures::captures::Captures;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::tagged_ptr::TaggedRef;
 use rustc_errors::{DiagArgFromDisplay, DiagCtxtHandle, StashKey};
 use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res};
@@ -129,11 +130,11 @@ struct LoweringContext<'a, 'hir> {
     #[cfg(debug_assertions)]
     node_id_to_local_id: NodeMap<hir::ItemLocalId>,
 
-    allow_try_trait: Lrc<[Symbol]>,
-    allow_gen_future: Lrc<[Symbol]>,
-    allow_async_iterator: Lrc<[Symbol]>,
-    allow_for_await: Lrc<[Symbol]>,
-    allow_async_fn_traits: Lrc<[Symbol]>,
+    allow_try_trait: Arc<[Symbol]>,
+    allow_gen_future: Arc<[Symbol]>,
+    allow_async_iterator: Arc<[Symbol]>,
+    allow_for_await: Arc<[Symbol]>,
+    allow_async_fn_traits: Arc<[Symbol]>,
 }
 
 impl<'a, 'hir> LoweringContext<'a, 'hir> {
@@ -722,7 +723,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         &self,
         reason: DesugaringKind,
         span: Span,
-        allow_internal_unstable: Option<Lrc<[Symbol]>>,
+        allow_internal_unstable: Option<Arc<[Symbol]>>,
     ) -> Span {
         self.tcx.with_stable_hashing_context(|hcx| {
             span.mark_with_reason(allow_internal_unstable, reason, span.edition(), hcx)
@@ -1664,7 +1665,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             CoroutineKind::Async { return_impl_trait_id, .. } => (return_impl_trait_id, None),
             CoroutineKind::Gen { return_impl_trait_id, .. } => (return_impl_trait_id, None),
             CoroutineKind::AsyncGen { return_impl_trait_id, .. } => {
-                (return_impl_trait_id, Some(Lrc::clone(&self.allow_async_iterator)))
+                (return_impl_trait_id, Some(Arc::clone(&self.allow_async_iterator)))
             }
         };
 
@@ -1,5 +1,6 @@
+use std::sync::Arc;
+
 use rustc_ast::{self as ast, *};
-use rustc_data_structures::sync::Lrc;
 use rustc_hir as hir;
 use rustc_hir::GenericArg;
 use rustc_hir::def::{DefKind, PartialRes, Res};
@@ -72,7 +73,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         let bound_modifier_allowed_features = if let Res::Def(DefKind::Trait, async_def_id) = res
             && self.tcx.async_fn_trait_kind_from_def_id(async_def_id).is_some()
         {
-            Some(Lrc::clone(&self.allow_async_fn_traits))
+            Some(Arc::clone(&self.allow_async_fn_traits))
         } else {
             None
         };
@@ -257,7 +258,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         // Additional features ungated with a bound modifier like `async`.
         // This is passed down to the implicit associated type binding in
        // parenthesized bounds.
-        bound_modifier_allowed_features: Option<Lrc<[Symbol]>>,
+        bound_modifier_allowed_features: Option<Arc<[Symbol]>>,
     ) -> hir::PathSegment<'hir> {
         debug!("path_span: {:?}, lower_path_segment(segment: {:?})", path_span, segment);
         let (mut generic_args, infer_args) = if let Some(generic_args) = segment.args.as_deref() {
@@ -490,7 +491,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         &mut self,
         data: &ParenthesizedArgs,
         itctx: ImplTraitContext,
-        bound_modifier_allowed_features: Option<Lrc<[Symbol]>>,
+        bound_modifier_allowed_features: Option<Arc<[Symbol]>>,
     ) -> (GenericArgsCtor<'hir>, bool) {
         // Switch to `PassThrough` mode for anonymous lifetimes; this
         // means that we permit things like `&Ref<T>`, where `Ref` has
@@ -7,6 +7,7 @@ mod fixup;
 mod item;
 
 use std::borrow::Cow;
+use std::sync::Arc;
 
 use rustc_ast::attr::AttrIdGenerator;
 use rustc_ast::ptr::P;
@@ -21,7 +22,6 @@ use rustc_ast::{
     InlineAsmOperand, InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, PatKind,
     RangeEnd, RangeSyntax, Safety, SelfKind, Term, attr,
 };
-use rustc_data_structures::sync::Lrc;
 use rustc_span::edition::Edition;
 use rustc_span::source_map::{SourceMap, Spanned};
 use rustc_span::symbol::IdentPrinter;
@@ -106,7 +106,7 @@ fn split_block_comment_into_lines(text: &str, col: CharPos) -> Vec<String> {
 fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment> {
     let sm = SourceMap::new(sm.path_mapping().clone());
     let source_file = sm.new_source_file(path, src);
-    let text = Lrc::clone(&(*source_file.src.as_ref().unwrap()));
+    let text = Arc::clone(&(*source_file.src.as_ref().unwrap()));
 
     let text: &str = text.as_str();
     let start_bpos = source_file.start_pos;
@@ -1,12 +1,12 @@
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
+use std::sync::Arc;
 
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::sync::Lrc;
 use rustc_expand::base::{
     DummyResult, ExpandResult, ExtCtxt, MacEager, MacResult, MacroExpanderResult, resolve_path,
 };
@@ -249,7 +249,7 @@ fn load_binary_file(
     original_path: &Path,
     macro_span: Span,
     path_span: Span,
-) -> Result<(Lrc<[u8]>, Span), Box<dyn MacResult>> {
+) -> Result<(Arc<[u8]>, Span), Box<dyn MacResult>> {
     let resolved_path = match resolve_path(&cx.sess, original_path, macro_span) {
         Ok(path) => path,
         Err(err) => {
@@ -1,9 +1,9 @@
 use std::ops::Range;
+use std::sync::Arc;
 
 use gccjit::{Location, RValue};
 use rustc_codegen_ssa::mir::debuginfo::{DebugScope, FunctionDebugContext, VariableKind};
 use rustc_codegen_ssa::traits::{DebugInfoBuilderMethods, DebugInfoCodegenMethods};
-use rustc_data_structures::sync::Lrc;
 use rustc_index::bit_set::DenseBitSet;
 use rustc_index::{Idx, IndexVec};
 use rustc_middle::mir::{self, Body, SourceScope};
@@ -172,7 +172,7 @@ fn make_mir_scope<'gcc, 'tcx>(
 // `lookup_char_pos` return the right information instead.
 pub struct DebugLoc {
     /// Information about the original source file.
-    pub file: Lrc<SourceFile>,
+    pub file: Arc<SourceFile>,
     /// The (1-based) line number.
     pub line: u32,
     /// The (1-based) column number.
@@ -2,6 +2,7 @@
 
 use std::cell::{OnceCell, RefCell};
 use std::ops::Range;
+use std::sync::Arc;
 use std::{iter, ptr};
 
 use libc::c_uint;
@@ -10,7 +11,6 @@ use rustc_codegen_ssa::debuginfo::type_names;
 use rustc_codegen_ssa::mir::debuginfo::VariableKind::*;
 use rustc_codegen_ssa::mir::debuginfo::{DebugScope, FunctionDebugContext, VariableKind};
 use rustc_codegen_ssa::traits::*;
-use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::unord::UnordMap;
 use rustc_hir::def_id::{DefId, DefIdMap};
 use rustc_index::IndexVec;
@@ -248,7 +248,7 @@ impl<'ll> DebugInfoBuilderMethods for Builder<'_, 'll, '_> {
 // `lookup_char_pos` return the right information instead.
 struct DebugLoc {
     /// Information about the original source file.
-    file: Lrc<SourceFile>,
+    file: Arc<SourceFile>,
     /// The (1-based) line number.
     line: u32,
     /// The (1-based) column number.
@@ -1,5 +1,6 @@
 use std::cmp;
 use std::collections::BTreeSet;
+use std::sync::Arc;
 use std::time::{Duration, Instant};
 
 use itertools::Itertools;
@@ -7,7 +8,7 @@ use rustc_abi::FIRST_VARIANT;
 use rustc_ast::expand::allocator::{ALLOCATOR_METHODS, AllocatorKind, global_fn_name};
 use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
-use rustc_data_structures::sync::{Lrc, par_map};
+use rustc_data_structures::sync::par_map;
 use rustc_data_structures::unord::UnordMap;
 use rustc_hir::def_id::{DefId, LOCAL_CRATE};
 use rustc_hir::lang_items::LangItem;
@@ -932,7 +933,7 @@ impl CrateInfo {
             crate_name: UnordMap::with_capacity(n_crates),
             used_crates,
             used_crate_source: UnordMap::with_capacity(n_crates),
-            dependency_formats: Lrc::clone(tcx.dependency_formats(())),
+            dependency_formats: Arc::clone(tcx.dependency_formats(())),
             windows_subsystem,
             natvis_debugger_visualizers: Default::default(),
             lint_levels: CodegenLintLevels::from_tcx(tcx),
@@ -946,7 +947,7 @@ impl CrateInfo {
             info.crate_name.insert(cnum, tcx.crate_name(cnum));
 
             let used_crate_source = tcx.used_crate_source(cnum);
-            info.used_crate_source.insert(cnum, Lrc::clone(used_crate_source));
+            info.used_crate_source.insert(cnum, Arc::clone(used_crate_source));
             if tcx.is_profiler_runtime(cnum) {
                 info.profiler_runtime = Some(cnum);
             }
@@ -24,10 +24,10 @@
 use std::collections::BTreeSet;
 use std::io;
 use std::path::{Path, PathBuf};
+use std::sync::Arc;
 
 use rustc_ast as ast;
 use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
-use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::unord::UnordMap;
 use rustc_hir::CRATE_HIR_ID;
 use rustc_hir::def_id::CrateNum;
@@ -200,9 +200,9 @@ pub struct CrateInfo {
     pub native_libraries: FxIndexMap<CrateNum, Vec<NativeLib>>,
     pub crate_name: UnordMap<CrateNum, Symbol>,
     pub used_libraries: Vec<NativeLib>,
-    pub used_crate_source: UnordMap<CrateNum, Lrc<CrateSource>>,
+    pub used_crate_source: UnordMap<CrateNum, Arc<CrateSource>>,
     pub used_crates: Vec<CrateNum>,
-    pub dependency_formats: Lrc<Dependencies>,
+    pub dependency_formats: Arc<Dependencies>,
     pub windows_subsystem: Option<String>,
     pub natvis_debugger_visualizers: BTreeSet<DebuggerVisualizerFile>,
     pub lint_levels: CodegenLintLevels,
@@ -1,15 +1,15 @@
 use std::borrow::Borrow;
 use std::ops::Deref;
+use std::sync::Arc;
 
 // Use our fake Send/Sync traits when on not parallel compiler,
 // so that `OwnedSlice` only implements/requires Send/Sync
 // for parallel compiler builds.
 use crate::sync;
-use crate::sync::Lrc;
 
 /// An owned slice.
 ///
-/// This is similar to `Lrc<[u8]>` but allows slicing and using anything as the
+/// This is similar to `Arc<[u8]>` but allows slicing and using anything as the
 /// backing buffer.
 ///
 /// See [`slice_owned`] for `OwnedSlice` construction and examples.
@@ -34,7 +34,7 @@ pub struct OwnedSlice {
     // \/
     // ⊂(´・◡・⊂ )∘˚˳° (I am the phantom remnant of #97770)
     #[expect(dead_code)]
-    owner: Lrc<dyn sync::Send + sync::Sync>,
+    owner: Arc<dyn sync::Send + sync::Sync>,
 }
 
 /// Makes an [`OwnedSlice`] out of an `owner` and a `slicer` function.
@@ -86,7 +86,7 @@ where
     // N.B. the HRTB on the `slicer` is important — without it the caller could provide
     // a short lived slice, unrelated to the owner.
 
-    let owner = Lrc::new(owner);
+    let owner = Arc::new(owner);
     let bytes = slicer(&*owner)?;
 
     Ok(OwnedSlice { bytes, owner })
@@ -18,7 +18,6 @@
 //!
 //! | Type | Serial version | Parallel version |
 //! | ----------------------- | ------------------- | ------------------------------- |
-//! | `Lrc<T>` | `rc::Rc<T>` | `sync::Arc<T>` |
 //! | `Weak<T>` | `rc::Weak<T>` | `sync::Weak<T>` |
 //! | `LRef<'a, T>` [^2] | `&'a mut T` | `&'a T` |
 //! | | | |
@@ -109,7 +108,7 @@ pub use std::marker::{Send, Sync};
 #[cfg(target_has_atomic = "64")]
 pub use std::sync::atomic::AtomicU64;
 pub use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize};
-pub use std::sync::{Arc as Lrc, OnceLock, Weak};
+pub use std::sync::{OnceLock, Weak};
 
 pub use mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};
 pub use parking_lot::{
@@ -10,7 +10,7 @@
 use std::borrow::Cow;
 use std::error::Error;
 use std::path::{Path, PathBuf};
-use std::sync::LazyLock;
+use std::sync::{Arc, LazyLock};
 use std::{fmt, fs, io};
 
 use fluent_bundle::FluentResource;
@@ -19,7 +19,7 @@ pub use fluent_bundle::{self, FluentArgs, FluentError, FluentValue};
 use fluent_syntax::parser::ParserError;
 use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker};
 use intl_memoizer::concurrent::IntlLangMemoizer;
-use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
+use rustc_data_structures::sync::IntoDynSyncSend;
 use rustc_macros::{Decodable, Encodable};
 use rustc_span::Span;
 use tracing::{instrument, trace};
@@ -112,7 +112,7 @@ pub fn fluent_bundle(
     requested_locale: Option<LanguageIdentifier>,
     additional_ftl_path: Option<&Path>,
     with_directionality_markers: bool,
-) -> Result<Option<Lrc<FluentBundle>>, TranslationBundleError> {
+) -> Result<Option<Arc<FluentBundle>>, TranslationBundleError> {
     if requested_locale.is_none() && additional_ftl_path.is_none() {
         return Ok(None);
     }
@@ -190,7 +190,7 @@ pub fn fluent_bundle(
         bundle.add_resource_overriding(resource);
     }
 
-    let bundle = Lrc::new(bundle);
+    let bundle = Arc::new(bundle);
     Ok(Some(bundle))
 }
 
@@ -205,7 +205,7 @@ fn register_functions(bundle: &mut FluentBundle) {
 
 /// Type alias for the result of `fallback_fluent_bundle` - a reference-counted pointer to a lazily
 /// evaluated fluent bundle.
-pub type LazyFallbackBundle = Lrc<LazyLock<FluentBundle, impl FnOnce() -> FluentBundle>>;
+pub type LazyFallbackBundle = Arc<LazyLock<FluentBundle, impl FnOnce() -> FluentBundle>>;
 
 /// Return the default `FluentBundle` with standard "en-US" diagnostic messages.
 #[instrument(level = "trace", skip(resources))]
@@ -213,7 +213,7 @@ pub fn fallback_fluent_bundle(
     resources: Vec<&'static str>,
     with_directionality_markers: bool,
 ) -> LazyFallbackBundle {
-    Lrc::new(LazyLock::new(move || {
+    Arc::new(LazyLock::new(move || {
         let mut fallback_bundle = new_bundle(vec![langid!("en-US")]);
 
         register_functions(&mut fallback_bundle);
@@ -5,8 +5,9 @@
 //!
 //! [annotate_snippets]: https://docs.rs/crate/annotate-snippets/
 
+use std::sync::Arc;
+
 use annotate_snippets::{Renderer, Snippet};
-use rustc_data_structures::sync::Lrc;
 use rustc_error_messages::FluentArgs;
 use rustc_span::SourceFile;
 use rustc_span::source_map::SourceMap;
@@ -22,8 +23,8 @@ use crate::{
 
 /// Generates diagnostics using annotate-snippet
 pub struct AnnotateSnippetEmitter {
-    source_map: Option<Lrc<SourceMap>>,
-    fluent_bundle: Option<Lrc<FluentBundle>>,
+    source_map: Option<Arc<SourceMap>>,
+    fluent_bundle: Option<Arc<FluentBundle>>,
     fallback_bundle: LazyFallbackBundle,
 
     /// If true, hides the longer explanation text
@@ -80,7 +81,7 @@ impl Emitter for AnnotateSnippetEmitter {
 }
 
 /// Provides the source string for the given `line` of `file`
-fn source_string(file: Lrc<SourceFile>, line: &Line) -> String {
+fn source_string(file: Arc<SourceFile>, line: &Line) -> String {
     file.get_line(line.line_index - 1).map(|a| a.to_string()).unwrap_or_default()
 }
 
@@ -102,8 +103,8 @@ fn annotation_level_for_level(level: Level) -> annotate_snippets::Level {
 
 impl AnnotateSnippetEmitter {
     pub fn new(
        source_map: Option<Lrc<SourceMap>>,
-        source_map: Option<Lrc<SourceMap>>,
-        fluent_bundle: Option<Lrc<FluentBundle>>,
+        source_map: Option<Arc<SourceMap>>,
+        fluent_bundle: Option<Arc<FluentBundle>>,
         fallback_bundle: LazyFallbackBundle,
         short_message: bool,
         macro_backtrace: bool,
@@ -174,7 +175,7 @@ impl AnnotateSnippetEmitter {
                 source_map.ensure_source_file_source_present(&file);
                 (
                     format!("{}", source_map.filename_for_diagnostics(&file.name)),
-                    source_string(Lrc::clone(&file), &line),
+                    source_string(Arc::clone(&file), &line),
                     line.line_index,
                     line.annotations,
                 )
@@ -14,10 +14,11 @@ use std::io::prelude::*;
 use std::io::{self, IsTerminal};
 use std::iter;
 use std::path::Path;
+use std::sync::Arc;
 
 use derive_setters::Setters;
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
-use rustc_data_structures::sync::{DynSend, IntoDynSyncSend, Lrc};
+use rustc_data_structures::sync::{DynSend, IntoDynSyncSend};
 use rustc_error_messages::{FluentArgs, SpanLabel};
 use rustc_lexer;
 use rustc_lint_defs::pluralize;
@@ -610,8 +611,8 @@ pub enum OutputTheme {
 pub struct HumanEmitter {
     #[setters(skip)]
     dst: IntoDynSyncSend<Destination>,
-    sm: Option<Lrc<SourceMap>>,
-    fluent_bundle: Option<Lrc<FluentBundle>>,
+    sm: Option<Arc<SourceMap>>,
+    fluent_bundle: Option<Arc<FluentBundle>>,
     #[setters(skip)]
     fallback_bundle: LazyFallbackBundle,
     short_message: bool,
@@ -628,7 +629,7 @@ pub struct HumanEmitter {
 
 #[derive(Debug)]
 pub(crate) struct FileWithAnnotatedLines {
-    pub(crate) file: Lrc<SourceFile>,
+    pub(crate) file: Arc<SourceFile>,
     pub(crate) lines: Vec<Line>,
     multiline_depth: usize,
 }
@@ -712,7 +713,7 @@ impl HumanEmitter {
     fn render_source_line(
         &self,
         buffer: &mut StyledBuffer,
-        file: Lrc<SourceFile>,
+        file: Arc<SourceFile>,
         line: &Line,
         width_offset: usize,
         code_offset: usize,
@@ -1691,7 +1692,7 @@ impl HumanEmitter {
         // Get the left-side margin to remove it
        let mut whitespace_margin = usize::MAX;
        for line_idx in 0..annotated_file.lines.len() {
-            let file = Lrc::clone(&annotated_file.file);
+            let file = Arc::clone(&annotated_file.file);
            let line = &annotated_file.lines[line_idx];
            if let Some(source_string) =
                line.line_index.checked_sub(1).and_then(|l| file.get_line(l))
@@ -1787,7 +1788,7 @@ impl HumanEmitter {
 
             let depths = self.render_source_line(
                 &mut buffer,
-                Lrc::clone(&annotated_file.file),
+                Arc::clone(&annotated_file.file),
                 &annotated_file.lines[line_idx],
                 width_offset,
                 code_offset,
@@ -2976,7 +2977,7 @@ impl FileWithAnnotatedLines {
     ) -> Vec<FileWithAnnotatedLines> {
        fn add_annotation_to_file(
            file_vec: &mut Vec<FileWithAnnotatedLines>,
-            file: Lrc<SourceFile>,
+            file: Arc<SourceFile>,
            line_index: usize,
            ann: Annotation,
        ) {
@@ -3113,7 +3114,7 @@ impl FileWithAnnotatedLines {
                 // | baz
                 add_annotation_to_file(
                     &mut output,
-                    Lrc::clone(&file),
+                    Arc::clone(&file),
                     ann.line_start,
                     ann.as_start(),
                 );
@@ -3140,12 +3141,12 @@ impl FileWithAnnotatedLines {
                     .unwrap_or(ann.line_start);
                 for line in ann.line_start + 1..until {
                     // Every `|` that joins the beginning of the span (`___^`) to the end (`|__^`).
-                    add_annotation_to_file(&mut output, Lrc::clone(&file), line, ann.as_line());
+                    add_annotation_to_file(&mut output, Arc::clone(&file), line, ann.as_line());
                 }
                 let line_end = ann.line_end - 1;
                 let end_is_empty = file.get_line(line_end - 1).is_some_and(|s| !filter(&s));
                 if middle < line_end && !end_is_empty {
-                    add_annotation_to_file(&mut output, Lrc::clone(&file), line_end, ann.as_line());
+                    add_annotation_to_file(&mut output, Arc::clone(&file), line_end, ann.as_line());
                 }
             } else {
                 end_ann.annotation_type = AnnotationType::Singleline;
@@ -16,7 +16,7 @@ use std::sync::{Arc, Mutex};
 use std::vec;
 
 use derive_setters::Setters;
-use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
+use rustc_data_structures::sync::IntoDynSyncSend;
 use rustc_error_messages::FluentArgs;
 use rustc_lint_defs::Applicability;
 use rustc_span::Span;
@@ -45,8 +45,8 @@ pub struct JsonEmitter
 #[setters(skip)]
 dst: IntoDynSyncSend<Box<dyn Write + Send>>,
 #[setters(skip)]
-sm: Option<Lrc<SourceMap>>,
-fluent_bundle: Option<Lrc<FluentBundle>>,
+sm: Option<Arc<SourceMap>>,
+fluent_bundle: Option<Arc<FluentBundle>>,
 #[setters(skip)]
 fallback_bundle: LazyFallbackBundle,
 #[setters(skip)]
@@ -65,7 +65,7 @@ pub struct JsonEmitter
 impl JsonEmitter {
 pub fn new(
 dst: Box<dyn Write + Send>,
-sm: Option<Lrc<SourceMap>>,
+sm: Option<Arc<SourceMap>>,
 fallback_bundle: LazyFallbackBundle,
 pretty: bool,
 json_rendered: HumanReadableErrorType,
@@ -369,7 +369,7 @@ impl Diagnostic
 ColorConfig::Always | ColorConfig::Auto => dst = Box::new(termcolor::Ansi::new(dst)),
 ColorConfig::Never => {}
 }
-HumanEmitter::new(dst, Lrc::clone(&je.fallback_bundle))
+HumanEmitter::new(dst, Arc::clone(&je.fallback_bundle))
 .short_message(short)
 .sm(je.sm.clone())
 .fluent_bundle(je.fluent_bundle.clone())
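Calls such as `.sm(je.sm.clone())` and `.fluent_bundle(je.fluent_bundle.clone())` did not need to change here: cloning an `Option<Arc<T>>` clones the handle inside the `Option`, not the pointed-to value. A small sketch under simplified types (the `SourceMap` and `Emitter` structs below are placeholders, not the real ones):

    use std::sync::Arc;

    struct SourceMap {
        files: Vec<String>,
    }

    struct Emitter {
        sm: Option<Arc<SourceMap>>,
    }

    fn main() {
        let sm = Arc::new(SourceMap { files: vec!["main.rs".into()] });
        let json = Emitter { sm: Some(Arc::clone(&sm)) };

        // `Option<Arc<T>>::clone` only bumps the refcount of the inner `Arc`.
        let human = Emitter { sm: json.sm.clone() };

        assert_eq!(Arc::strong_count(&sm), 3); // `sm`, `json.sm`, `human.sm`
        assert_eq!(human.sm.as_ref().unwrap().files.len(), 1);
    }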
@@ -39,7 +39,7 @@ impl<T: Write> Write for Shared<T> {
 /// Test the span yields correct positions in JSON.
 fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
 rustc_span::create_default_session_globals_then(|| {
-let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
 sm.new_source_file(Path::new("test.rs").to_owned().into(), code.to_owned());
 let fallback_bundle =
 crate::fallback_fluent_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false);
@@ -3,6 +3,7 @@ use std::iter;
 use std::path::Component::Prefix;
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
+use std::sync::Arc;
 
 use rustc_ast::attr::{AttributeExt, MarkedAttrs};
 use rustc_ast::ptr::P;
@@ -12,7 +13,7 @@ use rustc_ast::visit::{AssocCtxt, Visitor};
 use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind};
 use rustc_attr_parsing::{self as attr, Deprecation, Stability};
 use rustc_data_structures::fx::FxIndexMap;
-use rustc_data_structures::sync::{self, Lrc};
+use rustc_data_structures::sync;
 use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, PResult};
 use rustc_feature::Features;
 use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools};
@@ -727,7 +728,7 @@ pub struct SyntaxExtension
 /// Span of the macro definition.
 pub span: Span,
 /// List of unstable features that are treated as stable inside this macro.
-pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
+pub allow_internal_unstable: Option<Arc<[Symbol]>>,
 /// The macro's stability info.
 pub stability: Option<Stability>,
 /// The macro's deprecation info.
@@ -986,7 +987,7 @@ pub struct Indeterminate;
 pub struct DeriveResolution {
 pub path: ast::Path,
 pub item: Annotatable,
-pub exts: Option<Lrc<SyntaxExtension>>,
+pub exts: Option<Arc<SyntaxExtension>>,
 pub is_const: bool,
 }
 
@@ -1017,7 +1018,7 @@ pub trait ResolverExpand {
 invoc: &Invocation,
 eager_expansion_root: LocalExpnId,
 force: bool,
-) -> Result<Lrc<SyntaxExtension>, Indeterminate>;
+) -> Result<Arc<SyntaxExtension>, Indeterminate>;
 
 fn record_macro_rule_usage(&mut self, mac_id: NodeId, rule_index: usize);
 
@@ -1,6 +1,7 @@
 use std::ops::Deref;
 use std::path::PathBuf;
 use std::rc::Rc;
+use std::sync::Arc;
 use std::{iter, mem};
 
 use rustc_ast as ast;
@@ -16,7 +17,6 @@ use rustc_ast::{
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::PResult;
 use rustc_feature::Features;
 use rustc_parse::parser::{
@@ -579,7 +579,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 &mut self,
 mut fragment: AstFragment,
 extra_placeholders: &[NodeId],
-) -> (AstFragment, Vec<(Invocation, Option<Lrc<SyntaxExtension>>)>) {
+) -> (AstFragment, Vec<(Invocation, Option<Arc<SyntaxExtension>>)>) {
 // Resolve `$crate`s in the fragment for pretty-printing.
 self.cx.resolver.resolve_dollar_crates();
 
@@ -1774,7 +1774,7 @@ fn build_single_delegations<'a, Node: InvocationCollectorNode>(
 
 struct InvocationCollector<'a, 'b> {
 cx: &'a mut ExtCtxt<'b>,
-invocations: Vec<(Invocation, Option<Lrc<SyntaxExtension>>)>,
+invocations: Vec<(Invocation, Option<Arc<SyntaxExtension>>)>,
 monotonic: bool,
 }
 
@@ -1,11 +1,11 @@
 use std::mem;
+use std::sync::Arc;
 
 use rustc_ast::ExprKind;
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, LitKind, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
 use rustc_parse::lexer::nfc_normalize;
 use rustc_parse::parser::ParseNtResult;
@@ -299,7 +299,7 @@ pub(super) fn transcribe<'a>(
 marker.visit_span(&mut sp);
 let use_span = nt.use_span();
 with_metavar_spans(|mspans| mspans.insert(use_span, sp));
-TokenTree::token_alone(token::Interpolated(Lrc::clone(nt)), sp)
+TokenTree::token_alone(token::Interpolated(Arc::clone(nt)), sp)
 }
 MatchedSeq(..) => {
 // We were unable to descend far enough. This is an error.
@@ -1,4 +1,5 @@
 use std::ops::{Bound, Range};
+use std::sync::Arc;
 
 use ast::token::IdentIsRaw;
 use pm::bridge::{
@@ -11,7 +12,6 @@ use rustc_ast::tokenstream::{self, DelimSpacing, Spacing, TokenStream};
 use rustc_ast::util::literal::escape_byte_str_symbol;
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
 use rustc_parse::lexer::nfc_normalize;
 use rustc_parse::parser::Parser;
@@ -446,7 +446,7 @@ impl<'a, 'b> Rustc<'a, 'b> {
 impl server::Types for Rustc<'_, '_> {
 type FreeFunctions = FreeFunctions;
 type TokenStream = TokenStream;
-type SourceFile = Lrc<SourceFile>;
+type SourceFile = Arc<SourceFile>;
 type Span = Span;
 type Symbol = Symbol;
 }
@@ -657,7 +657,7 @@ impl server::TokenStream for Rustc<'_, '_> {
 
 impl server::SourceFile for Rustc<'_, '_> {
 fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
-Lrc::ptr_eq(file1, file2)
+Arc::ptr_eq(file1, file2)
 }
 
 fn path(&mut self, file: &Self::SourceFile) -> String {
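On the proc-macro server side, `SourceFile` handles are compared by identity, and `Arc::ptr_eq` behaves exactly like the old `Lrc::ptr_eq`: it compares allocations, not contents. A freestanding illustration of that distinction:

    use std::sync::Arc;

    fn main() {
        let a = Arc::new(String::from("same contents"));
        let b = Arc::clone(&a);                           // same allocation
        let c = Arc::new(String::from("same contents"));  // different allocation

        // Identity comparison: only clones of the same Arc compare equal.
        assert!(Arc::ptr_eq(&a, &b));
        assert!(!Arc::ptr_eq(&a, &c));

        // Value comparison would still consider them equal.
        assert_eq!(*a, *c);
    }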
@@ -1,12 +1,12 @@
 use std::path::PathBuf;
 use std::result;
+use std::sync::Arc;
 
 use rustc_ast::{LitKind, MetaItemKind, token};
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::jobserver;
 use rustc_data_structures::stable_hasher::StableHasher;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::registry::Registry;
 use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed};
 use rustc_lint::LintStore;
@@ -490,7 +490,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
 if let Some(register_lints) = config.register_lints.as_deref() {
 register_lints(&sess, &mut lint_store);
 }
-sess.lint_store = Some(Lrc::new(lint_store));
+sess.lint_store = Some(Arc::new(lint_store));
 
 util::check_abi_required_features(&sess);
 
@@ -9,7 +9,7 @@ use rustc_ast as ast;
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::parallel;
 use rustc_data_structures::steal::Steal;
-use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, Lrc, OnceLock, WorkerLocal};
+use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, OnceLock, WorkerLocal};
 use rustc_expand::base::{ExtCtxt, LintStoreExpand};
 use rustc_feature::Features;
 use rustc_fs_util::try_canonicalize;
@@ -601,7 +601,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
 fn resolver_for_lowering_raw<'tcx>(
 tcx: TyCtxt<'tcx>,
 (): (),
-) -> (&'tcx Steal<(ty::ResolverAstLowering, Lrc<ast::Crate>)>, &'tcx ty::ResolverGlobalCtxt) {
+) -> (&'tcx Steal<(ty::ResolverAstLowering, Arc<ast::Crate>)>, &'tcx ty::ResolverGlobalCtxt) {
 let arenas = Resolver::arenas();
 let _ = tcx.registered_tools(()); // Uses `crate_for_resolver`.
 let (krate, pre_configured_attrs) = tcx.crate_for_resolver(()).steal();
@@ -623,7 +623,7 @@ fn resolver_for_lowering_raw<'tcx>(
 } = resolver.into_outputs();
 
 let resolutions = tcx.arena.alloc(untracked_resolutions);
-(tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, Lrc::new(krate)))), resolutions)
+(tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, Arc::new(krate)))), resolutions)
 }
 
 pub fn write_dep_info(tcx: TyCtxt<'_>) {
@@ -2,6 +2,7 @@
 
 use std::iter::TrustedLen;
 use std::path::Path;
+use std::sync::Arc;
 use std::{io, iter, mem};
 
 pub(super) use cstore_impl::provide;
@@ -11,7 +12,7 @@ use rustc_data_structures::captures::Captures;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::owned_slice::OwnedSlice;
-use rustc_data_structures::sync::{Lock, Lrc, OnceLock};
+use rustc_data_structures::sync::{Lock, OnceLock};
 use rustc_data_structures::unhash::UnhashMap;
 use rustc_expand::base::{SyntaxExtension, SyntaxExtensionKind};
 use rustc_expand::proc_macro::{AttrProcMacro, BangProcMacro, DeriveProcMacro};
@@ -113,7 +114,7 @@ pub(crate) struct CrateMetadata
 /// How to link (or not link) this crate to the currently compiled crate.
 dep_kind: CrateDepKind,
 /// Filesystem location of this crate.
-source: Lrc<CrateSource>,
+source: Arc<CrateSource>,
 /// Whether or not this crate should be consider a private dependency.
 /// Used by the 'exported_private_dependencies' lint, and for determining
 /// whether to emit suggestions that reference this crate.
@@ -145,7 +146,7 @@ struct ImportedSourceFile
 /// The end of this SourceFile within the source_map of its original crate
 original_end_pos: rustc_span::BytePos,
 /// The imported SourceFile's representation within the local source_map
-translated_source_file: Lrc<rustc_span::SourceFile>,
+translated_source_file: Arc<rustc_span::SourceFile>,
 }
 
 pub(super) struct DecodeContext<'a, 'tcx> {
@@ -1855,7 +1856,7 @@ impl CrateMetadata
 cnum_map,
 dependencies,
 dep_kind,
-source: Lrc::new(source),
+source: Arc::new(source),
 private_dep,
 host_hash,
 used: false,
@@ -1,8 +1,8 @@
 use std::any::Any;
 use std::mem;
+use std::sync::Arc;
 
 use rustc_attr_parsing::Deprecation;
-use rustc_data_structures::sync::Lrc;
 use rustc_hir::def::{CtorKind, DefKind, Res};
 use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LOCAL_CRATE};
 use rustc_hir::definitions::{DefKey, DefPath, DefPathHash};
@@ -409,7 +409,7 @@ provide! { tcx, def_id, other, cdata,
 matches!(cdata.extern_crate, Some(extern_crate) if !extern_crate.is_direct())
 }
 
-used_crate_source => { Lrc::clone(&cdata.source) }
+used_crate_source => { Arc::clone(&cdata.source) }
 debugger_visualizers => { cdata.get_debugger_visualizers() }
 
 exported_symbols => {
@@ -548,7 +548,7 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
 visible_parent_map
 },
 
-dependency_formats: |tcx, ()| Lrc::new(crate::dependency_format::calculate(tcx)),
+dependency_formats: |tcx, ()| Arc::new(crate::dependency_format::calculate(tcx)),
 has_global_allocator: |tcx, LocalCrate| CStore::from_tcx(tcx).has_global_allocator(),
 has_alloc_error_handler: |tcx, LocalCrate| CStore::from_tcx(tcx).has_alloc_error_handler(),
 postorder_cnums: |tcx, ()| {
@@ -3,11 +3,12 @@ use std::collections::hash_map::Entry;
 use std::fs::File;
 use std::io::{Read, Seek, Write};
 use std::path::{Path, PathBuf};
+use std::sync::Arc;
 
 use rustc_ast::attr::AttributeExt;
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_data_structures::memmap::{Mmap, MmapMut};
-use rustc_data_structures::sync::{Lrc, join, par_for_each_in};
+use rustc_data_structures::sync::{join, par_for_each_in};
 use rustc_data_structures::temp_dir::MaybeTempDir;
 use rustc_data_structures::thousands::format_with_underscores;
 use rustc_feature::Features;
@@ -52,7 +53,7 @@ pub(super) struct EncodeContext<'a, 'tcx>
 // This is used to speed up Span encoding.
 // The `usize` is an index into the `MonotonicVec`
 // that stores the `SourceFile`
-source_file_cache: (Lrc<SourceFile>, usize),
+source_file_cache: (Arc<SourceFile>, usize),
 // The indices (into the `SourceMap`'s `MonotonicVec`)
 // of all of the `SourceFiles` that we need to serialize.
 // When we serialize a `Span`, we insert the index of its
@@ -278,7 +279,7 @@ impl<'a, 'tcx> Encodable<EncodeContext<'a, 'tcx>> for SpanData
 let source_map = s.tcx.sess.source_map();
 let source_file_index = source_map.lookup_source_file_idx(self.lo);
 s.source_file_cache =
-(Lrc::clone(&source_map.files()[source_file_index]), source_file_index);
+(Arc::clone(&source_map.files()[source_file_index]), source_file_index);
 }
 let (ref source_file, source_file_index) = s.source_file_cache;
 debug_assert!(source_file.contains(self.lo));
@@ -2298,7 +2299,7 @@ pub fn encode_metadata(tcx: TyCtxt<'_>, path: &Path)
 encoder.emit_raw_bytes(&0u64.to_le_bytes());
 
 let source_map_files = tcx.sess.source_map().files();
-let source_file_cache = (Lrc::clone(&source_map_files[0]), 0);
+let source_file_cache = (Arc::clone(&source_map_files[0]), 0);
 let required_source_files = Some(FxIndexSet::default());
 drop(source_map_files);
 
@@ -31,7 +31,7 @@ macro_rules! arena_types
 [decode] borrowck_result: rustc_middle::mir::BorrowCheckResult<'tcx>,
 [] resolver: rustc_data_structures::steal::Steal<(
 rustc_middle::ty::ResolverAstLowering,
-rustc_data_structures::sync::Lrc<rustc_ast::Crate>,
+std::sync::Arc<rustc_ast::Crate>,
 )>,
 [] crate_for_resolver: rustc_data_structures::steal::Steal<(rustc_ast::Crate, rustc_ast::AttrVec)>,
 [] resolutions: rustc_middle::ty::ResolverGlobalCtxt,
@@ -1,6 +1,6 @@
 use std::path::PathBuf;
+use std::sync::Arc;
 
-use rustc_data_structures::sync::Lrc;
 use rustc_macros::{Decodable, Encodable, HashStable};
 
 #[derive(HashStable)]
@@ -15,7 +15,7 @@ pub enum DebuggerVisualizerType
 #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Encodable, Decodable)]
 pub struct DebuggerVisualizerFile {
 /// The complete debugger visualizer source.
-pub src: Lrc<[u8]>,
+pub src: Arc<[u8]>,
 /// Indicates which visualizer type this targets.
 pub visualizer_type: DebuggerVisualizerType,
 /// The file path to the visualizer file. This is used for reporting
@@ -26,13 +26,13 @@ pub struct DebuggerVisualizerFile
 }
 
 impl DebuggerVisualizerFile {
-pub fn new(src: Lrc<[u8]>, visualizer_type: DebuggerVisualizerType, path: PathBuf) -> Self {
+pub fn new(src: Arc<[u8]>, visualizer_type: DebuggerVisualizerType, path: PathBuf) -> Self {
 DebuggerVisualizerFile { src, visualizer_type, path: Some(path) }
 }
 
 pub fn path_erased(&self) -> Self {
 DebuggerVisualizerFile {
-src: Lrc::clone(&self.src),
+src: Arc::clone(&self.src),
 visualizer_type: self.visualizer_type,
 path: None,
 }
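Storing the visualizer source as `Arc<[u8]>` means `path_erased` can return a second handle to the same bytes without copying the blob. A minimal sketch of building and sharing an `Arc<[u8]>` (the `Visualizer` type here is a simplified stand-in, not the compiler's struct):

    use std::sync::Arc;

    struct Visualizer {
        src: Arc<[u8]>,
        path: Option<String>,
    }

    impl Visualizer {
        fn path_erased(&self) -> Self {
            // Only the reference count changes; the byte blob itself is shared.
            Visualizer { src: Arc::clone(&self.src), path: None }
        }
    }

    fn main() {
        // A `Vec<u8>` converts into a shared, immutable `Arc<[u8]>`.
        let bytes: Arc<[u8]> = vec![0xDE, 0xAD, 0xBE, 0xEF].into();
        let vis = Visualizer { src: bytes, path: Some("dbg.natvis".into()) };

        let erased = vis.path_erased();
        assert_eq!(erased.src.len(), 4);
        assert!(erased.path.is_none());
        assert_eq!(Arc::strong_count(&vis.src), 2);
    }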
@@ -18,7 +18,6 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::steal::Steal;
 use rustc_data_structures::svh::Svh;
-use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::unord::{UnordMap, UnordSet};
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir::def::{DefKind, DocLinkResMap};
@@ -125,7 +124,7 @@ rustc_queries!
 desc { "getting the resolver outputs" }
 }
 
-query resolver_for_lowering_raw(_: ()) -> (&'tcx Steal<(ty::ResolverAstLowering, Lrc<ast::Crate>)>, &'tcx ty::ResolverGlobalCtxt) {
+query resolver_for_lowering_raw(_: ()) -> (&'tcx Steal<(ty::ResolverAstLowering, Arc<ast::Crate>)>, &'tcx ty::ResolverGlobalCtxt) {
 eval_always
 no_hash
 desc { "getting the resolver for lowering" }
@@ -1628,7 +1627,7 @@ rustc_queries!
 separate_provide_extern
 }
 
-query dependency_formats(_: ()) -> &'tcx Lrc<crate::middle::dependency_format::Dependencies> {
+query dependency_formats(_: ()) -> &'tcx Arc<crate::middle::dependency_format::Dependencies> {
 arena_cache
 desc { "getting the linkage format of all dependencies" }
 }
@@ -2077,7 +2076,7 @@ rustc_queries!
 desc { "seeing if we're missing an `extern crate` item for this crate" }
 separate_provide_extern
 }
-query used_crate_source(_: CrateNum) -> &'tcx Lrc<CrateSource> {
+query used_crate_source(_: CrateNum) -> &'tcx Arc<CrateSource> {
 arena_cache
 eval_always
 desc { "looking at the source for a crate" }
@@ -1,9 +1,10 @@
 use std::collections::hash_map::Entry;
 use std::mem;
+use std::sync::Arc;
 
 use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_data_structures::memmap::Mmap;
-use rustc_data_structures::sync::{HashMapExt, Lock, Lrc, RwLock};
+use rustc_data_structures::sync::{HashMapExt, Lock, RwLock};
 use rustc_data_structures::unhash::UnhashMap;
 use rustc_data_structures::unord::{UnordMap, UnordSet};
 use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE, LocalDefId, StableCrateId};
@@ -60,7 +61,7 @@ pub struct OnDiskCache
 file_index_to_stable_id: FxHashMap<SourceFileIndex, EncodedSourceFileId>,
 
 // Caches that are populated lazily during decoding.
-file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
+file_index_to_file: Lock<FxHashMap<SourceFileIndex, Arc<SourceFile>>>,
 
 // A map from dep-node to the position of the cached query result in
 // `serialized_data`.
@@ -453,7 +454,7 @@ impl OnDiskCache
 pub struct CacheDecoder<'a, 'tcx> {
 tcx: TyCtxt<'tcx>,
 opaque: MemDecoder<'a>,
-file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
+file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Arc<SourceFile>>>,
 file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, EncodedSourceFileId>,
 alloc_decoding_session: AllocDecodingSession<'a>,
 syntax_contexts: &'a FxHashMap<u32, AbsoluteBytePos>,
@@ -464,10 +465,10 @@ pub struct CacheDecoder<'a, 'tcx>
 
 impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
 #[inline]
-fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
+fn file_index_to_file(&self, index: SourceFileIndex) -> Arc<SourceFile> {
 let CacheDecoder { tcx, file_index_to_file, file_index_to_stable_id, .. } = *self;
 
-Lrc::clone(file_index_to_file.borrow_mut().entry(index).or_insert_with(|| {
+Arc::clone(file_index_to_file.borrow_mut().entry(index).or_insert_with(|| {
 let source_file_id = &file_index_to_stable_id[&index];
 let source_file_cnum = tcx.stable_crate_id_to_crate_num(source_file_id.stable_crate_id);
 
@@ -824,7 +825,7 @@ pub struct CacheEncoder<'a, 'tcx>
 
 impl<'a, 'tcx> CacheEncoder<'a, 'tcx> {
 #[inline]
-fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
+fn source_file_index(&mut self, source_file: Arc<SourceFile>) -> SourceFileIndex {
 self.file_to_file_index[&(&raw const *source_file)]
 }
 
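`file_index_to_file` is a lazily populated cache: the decoder locks the map, fills in the entry on first use, and returns another `Arc` handle to whatever is stored there. A minimal sketch of that shape using only std types (`Mutex` stands in for the compiler's `Lock`, and the `Decoder` type is invented for this example):

    use std::collections::HashMap;
    use std::sync::{Arc, Mutex};

    struct Decoder {
        // Populated lazily while decoding; every caller gets a shared handle.
        file_cache: Mutex<HashMap<u32, Arc<String>>>,
    }

    impl Decoder {
        fn file_for_index(&self, index: u32) -> Arc<String> {
            Arc::clone(
                self.file_cache
                    .lock()
                    .unwrap()
                    .entry(index)
                    .or_insert_with(|| {
                        // Imagine an expensive decode step here.
                        Arc::new(format!("decoded source file #{index}"))
                    }),
            )
        }
    }

    fn main() {
        let decoder = Decoder { file_cache: Mutex::new(HashMap::new()) };

        let first = decoder.file_for_index(7);
        let second = decoder.file_for_index(7);

        // The second lookup reuses the cached file instead of decoding again.
        assert!(Arc::ptr_eq(&first, &second));
    }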
@@ -10,8 +10,8 @@ mod structural_impls;
 
 use std::borrow::Cow;
 use std::hash::{Hash, Hasher};
+use std::sync::Arc;
 
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, Diag, EmissionGuarantee};
 use rustc_hir as hir;
 use rustc_hir::HirId;
@@ -158,7 +158,7 @@ pub struct UnifyReceiverContext<'tcx>
 pub struct InternedObligationCauseCode<'tcx> {
 /// `None` for `ObligationCauseCode::Misc` (a common case, occurs ~60% of
 /// the time). `Some` otherwise.
-code: Option<Lrc<ObligationCauseCode<'tcx>>>,
+code: Option<Arc<ObligationCauseCode<'tcx>>>,
 }
 
 impl<'tcx> std::fmt::Debug for InternedObligationCauseCode<'tcx> {
@@ -172,7 +172,7 @@ impl<'tcx> ObligationCauseCode<'tcx>
 #[inline(always)]
 fn into(self) -> InternedObligationCauseCode<'tcx> {
 InternedObligationCauseCode {
-code: if let ObligationCauseCode::Misc = self { None } else { Some(Lrc::new(self)) },
+code: if let ObligationCauseCode::Misc = self { None } else { Some(Arc::new(self)) },
 }
 }
 }
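The `code: Option<Arc<...>>` layout avoids any allocation for the very common `Misc` case and puts everything else behind a shared pointer that is cheap to clone. A simplified sketch of the same idea with made-up types:

    use std::sync::Arc;

    #[derive(Debug)]
    enum CauseCode {
        Misc,           // very common: don't allocate for it
        Custom(String), // rarer: worth sharing behind an Arc
    }

    #[derive(Clone, Debug)]
    struct InternedCause {
        // `None` encodes `Misc`; `Some` holds everything else.
        code: Option<Arc<CauseCode>>,
    }

    impl From<CauseCode> for InternedCause {
        fn from(code: CauseCode) -> Self {
            let code = match code {
                CauseCode::Misc => None,
                other => Some(Arc::new(other)),
            };
            InternedCause { code }
        }
    }

    fn main() {
        let cheap: InternedCause = CauseCode::Misc.into();
        let shared: InternedCause = CauseCode::Custom("trait bound".into()).into();

        assert!(cheap.code.is_none()); // no heap allocation on the hot path
        let copy = shared.clone();     // Arc refcount bump, not a deep copy
        assert!(Arc::ptr_eq(
            shared.code.as_ref().unwrap(),
            copy.code.as_ref().unwrap()
        ));
    }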
@@ -10,7 +10,7 @@ use std::cmp::Ordering;
 use std::hash::{Hash, Hasher};
 use std::marker::PhantomData;
 use std::ops::{Bound, Deref};
-use std::sync::OnceLock;
+use std::sync::{Arc, OnceLock};
 use std::{fmt, iter, mem};
 
 use rustc_abi::{ExternAbi, FieldIdx, Layout, LayoutData, TargetDataLayout, VariantIdx};
@@ -24,7 +24,7 @@ use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::steal::Steal;
 use rustc_data_structures::sync::{
-self, DynSend, DynSync, FreezeReadGuard, Lock, Lrc, RwLock, WorkerLocal,
+self, DynSend, DynSync, FreezeReadGuard, Lock, RwLock, WorkerLocal,
 };
 use rustc_data_structures::unord::UnordSet;
 use rustc_errors::{
@@ -1406,7 +1406,7 @@ impl<'tcx> GlobalCtxt<'tcx>
 pub struct CurrentGcx {
 /// This stores a pointer to a `GlobalCtxt`. This is set to `Some` inside `GlobalCtxt::enter`
 /// and reset to `None` when that function returns or unwinds.
-value: Lrc<RwLock<Option<*const ()>>>,
+value: Arc<RwLock<Option<*const ()>>>,
 }
 
 unsafe impl DynSend for CurrentGcx {}
@@ -1414,7 +1414,7 @@ unsafe impl DynSync for CurrentGcx {}
 
 impl CurrentGcx {
 pub fn new() -> Self {
-Self { value: Lrc::new(RwLock::new(None)) }
+Self { value: Arc::new(RwLock::new(None)) }
 }
 
 pub fn access<R>(&self, f: impl for<'tcx> FnOnce(&'tcx GlobalCtxt<'tcx>) -> R) -> R {
@@ -3224,7 +3224,7 @@ impl<'tcx> TyCtxt<'tcx>
 self.resolutions(()).module_children.get(&def_id).map_or(&[], |v| &v[..])
 }
 
-pub fn resolver_for_lowering(self) -> &'tcx Steal<(ty::ResolverAstLowering, Lrc<ast::Crate>)> {
+pub fn resolver_for_lowering(self) -> &'tcx Steal<(ty::ResolverAstLowering, Arc<ast::Crate>)> {
 self.resolver_for_lowering_raw(()).0
 }
 
@@ -148,7 +148,7 @@ pub struct TypeckResults<'tcx>
 
 /// Set of trait imports actually used in the method resolution.
 /// This is used for warning unused imports. During type
-/// checking, this `Lrc` should not be cloned: it must have a ref-count
+/// checking, this `Arc` should not be cloned: it must have a ref-count
 /// of 1 so that we can insert things into the set mutably.
 pub used_trait_imports: UnordSet<LocalDefId>,
 
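The comment above states a general `Arc` rule: in-place mutation is only possible while the reference count is exactly 1, which is what `Arc::get_mut` enforces. A short illustration:

    use std::collections::HashSet;
    use std::sync::Arc;

    fn main() {
        let mut imports: Arc<HashSet<&str>> = Arc::new(HashSet::new());

        // Sole owner: `get_mut` succeeds and we can insert directly.
        Arc::get_mut(&mut imports).unwrap().insert("std::fmt::Debug");

        let alias = Arc::clone(&imports);
        // Now the count is 2, so in-place mutation is refused.
        assert!(Arc::get_mut(&mut imports).is_none());

        assert!(alias.contains("std::fmt::Debug"));
    }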
@@ -17,12 +17,12 @@
 
 use std::path::{Path, PathBuf};
 use std::str::Utf8Error;
+use std::sync::Arc;
 
 use rustc_ast as ast;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::SourceMap;
@@ -147,7 +147,7 @@ pub fn utf8_error<E: EmissionGuarantee>(
 /// the initial token stream.
 fn new_parser_from_source_file(
 psess: &ParseSess,
-source_file: Lrc<SourceFile>,
+source_file: Arc<SourceFile>,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
 let end_pos = source_file.end_position();
 let stream = source_file_to_stream(psess, source_file, None)?;
@@ -172,7 +172,7 @@ pub fn source_str_to_stream(
 /// parsing the token stream.
 fn source_file_to_stream<'psess>(
 psess: &'psess ParseSess,
-source_file: Lrc<SourceFile>,
+source_file: Arc<SourceFile>,
 override_span: Option<Span>,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
 let src = source_file.src.as_ref().unwrap_or_else(|| {
@@ -1,5 +1,6 @@
 use std::mem::take;
 use std::ops::{Deref, DerefMut};
+use std::sync::Arc;
 
 use ast::token::IdentIsRaw;
 use rustc_ast as ast;
@@ -14,7 +15,6 @@ use rustc_ast::{
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{
 Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, PResult, Subdiagnostic, Suggestions,
 pluralize,
@@ -2403,7 +2403,7 @@ impl<'a> Parser<'a>
 let mut labels = vec![];
 while let TokenKind::Interpolated(nt) = &tok.kind {
 let tokens = nt.tokens();
-labels.push(Lrc::clone(nt));
+labels.push(Arc::clone(nt));
 if let Some(tokens) = tokens
 && let tokens = tokens.to_attr_token_stream()
 && let tokens = tokens.0.deref()
@@ -13,6 +13,7 @@ mod ty;
 
 use std::assert_matches::debug_assert_matches;
 use std::ops::Range;
+use std::sync::Arc;
 use std::{fmt, mem, slice};
 
 use attr_wrapper::{AttrWrapper, UsePreAttrPos};
@@ -34,7 +35,6 @@ use rustc_ast::{
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
 use rustc_index::interval::IntervalSet;
 use rustc_session::parse::ParseSess;
@@ -1685,5 +1685,5 @@ pub enum ParseNtResult
 Lifetime(Ident, IdentIsRaw),
 
 /// This case will eventually be removed, along with `Token::Interpolate`.
-Nt(Lrc<Nonterminal>),
+Nt(Arc<Nonterminal>),
 }
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use rustc_ast::HasTokens;
 use rustc_ast::ptr::P;
 use rustc_ast::token::Nonterminal::*;
@@ -7,7 +9,6 @@ use rustc_ast::token::{
 self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token,
 };
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::PResult;
 use rustc_span::{Ident, kw};
 
@@ -235,7 +236,7 @@ impl<'a> Parser<'a>
 );
 }
 
-Ok(ParseNtResult::Nt(Lrc::new(nt)))
+Ok(ParseNtResult::Nt(Arc::new(nt)))
 }
 }
 
@@ -13,7 +13,6 @@ use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast::{self as ast, PatKind, visit};
 use rustc_ast_pretty::pprust::item_to_string;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::emitter::{HumanEmitter, OutputTheme};
 use rustc_errors::{DiagCtxt, MultiSpan, PResult};
 use rustc_session::parse::ParseSess;
@@ -39,9 +38,9 @@ fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_>
 ))
 }
 
-fn create_test_handler(theme: OutputTheme) -> (DiagCtxt, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
+fn create_test_handler(theme: OutputTheme) -> (DiagCtxt, Arc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
 let output = Arc::new(Mutex::new(Vec::new()));
-let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
 let fallback_bundle = rustc_errors::fallback_fluent_bundle(
 vec![crate::DEFAULT_LOCALE_RESOURCE, crate::DEFAULT_LOCALE_RESOURCE],
 false,
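The test helper keeps one handle to the output buffer for itself and gives the emitter a writer over the same buffer; `Arc<Mutex<Vec<u8>>>` is the usual way to share a growable sink like that. A self-contained sketch (this `Shared` writer is written from scratch here and only mirrors the idea of the one used by these tests):

    use std::io::{self, Write};
    use std::sync::{Arc, Mutex};

    // A writer that appends into a buffer shared with the test itself.
    struct Shared {
        data: Arc<Mutex<Vec<u8>>>,
    }

    impl Write for Shared {
        fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
            self.data.lock().unwrap().extend_from_slice(buf);
            Ok(buf.len())
        }
        fn flush(&mut self) -> io::Result<()> {
            Ok(())
        }
    }

    fn main() {
        let output = Arc::new(Mutex::new(Vec::new()));
        let mut writer = Shared { data: Arc::clone(&output) };

        writeln!(writer, "error: something went wrong").unwrap();

        // The test side still owns a handle and can inspect what was emitted.
        let captured = String::from_utf8(output.lock().unwrap().clone()).unwrap();
        assert!(captured.contains("something went wrong"));
    }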
@@ -11,7 +11,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::profiling::{QueryInvocationId, SelfProfilerRef};
 use rustc_data_structures::sharded::{self, Sharded};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::{AtomicU32, AtomicU64, Lock, Lrc};
+use rustc_data_structures::sync::{AtomicU32, AtomicU64, Lock};
 use rustc_data_structures::unord::UnordMap;
 use rustc_index::IndexVec;
 use rustc_macros::{Decodable, Encodable};
@@ -29,13 +29,13 @@ use crate::query::{QueryContext, QuerySideEffects};
 
 #[derive(Clone)]
 pub struct DepGraph<D: Deps> {
-data: Option<Lrc<DepGraphData<D>>>,
+data: Option<Arc<DepGraphData<D>>>,
 
 /// This field is used for assigning DepNodeIndices when running in
 /// non-incremental mode. Even in non-incremental mode we make sure that
 /// each task has a `DepNodeIndex` that uniquely identifies it. This unique
 /// ID is used for self-profiling.
-virtual_dep_node_index: Lrc<AtomicU32>,
+virtual_dep_node_index: Arc<AtomicU32>,
 }
 
 rustc_index::newtype_index! {
@@ -171,7 +171,7 @@ impl<D: Deps> DepGraph<D>
 }
 
 DepGraph {
-data: Some(Lrc::new(DepGraphData {
+data: Some(Arc::new(DepGraphData {
 previous_work_products: prev_work_products,
 dep_node_debug: Default::default(),
 current,
@@ -180,12 +180,12 @@ impl<D: Deps> DepGraph<D>
 colors,
 debug_loaded_from_disk: Default::default(),
 })),
-virtual_dep_node_index: Lrc::new(AtomicU32::new(0)),
+virtual_dep_node_index: Arc::new(AtomicU32::new(0)),
 }
 }
 
 pub fn new_disabled() -> DepGraph<D> {
-DepGraph { data: None, virtual_dep_node_index: Lrc::new(AtomicU32::new(0)) }
+DepGraph { data: None, virtual_dep_node_index: Arc::new(AtomicU32::new(0)) }
 }
 
 #[inline]
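`DepGraph` is `Clone`, and every clone shares both the graph data and the `virtual_dep_node_index` counter, which is why both sit behind an `Arc`. A minimal sketch of a cloneable handle whose clones all draw indices from one shared atomic counter:

    use std::sync::Arc;
    use std::sync::atomic::{AtomicU32, Ordering};
    use std::thread;

    #[derive(Clone)]
    struct Graph {
        // Shared by every clone of the handle; cloning never resets the counter.
        next_index: Arc<AtomicU32>,
    }

    impl Graph {
        fn new() -> Self {
            Graph { next_index: Arc::new(AtomicU32::new(0)) }
        }

        fn next_virtual_index(&self) -> u32 {
            self.next_index.fetch_add(1, Ordering::Relaxed)
        }
    }

    fn main() {
        let graph = Graph::new();

        // Clones can move to worker threads; `Arc<AtomicU32>` is `Send + Sync`.
        let handles: Vec<_> = (0..4)
            .map(|_| {
                let g = graph.clone();
                thread::spawn(move || (0..100).map(|_| g.next_virtual_index()).count())
            })
            .collect();
        for h in handles {
            h.join().unwrap();
        }

        // All 400 indices came from the single shared counter.
        assert_eq!(graph.next_index.load(Ordering::Relaxed), 400);
    }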
@@ -1,6 +1,7 @@
+use std::sync::Arc;
+
 use rustc_ast as ast;
 use rustc_data_structures::stable_hasher::{HashStable, HashingControls, StableHasher};
-use rustc_data_structures::sync::Lrc;
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_hir::definitions::DefPathHash;
 use rustc_session::Session;
@@ -117,7 +118,7 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a>
 fn span_data_to_lines_and_cols(
 &mut self,
 span: &SpanData,
-) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)> {
+) -> Option<(Arc<SourceFile>, usize, BytePos, usize, BytePos)> {
 self.source_map().span_data_to_lines_and_cols(span)
 }
 
@@ -6,6 +6,7 @@
 //! Imports are also considered items and placed into modules here, but not resolved yet.
 
 use std::cell::Cell;
+use std::sync::Arc;
 
 use rustc_ast::visit::{self, AssocCtxt, Visitor, WalkItemKind};
 use rustc_ast::{
@@ -13,7 +14,6 @@ use rustc_ast::{
 ItemKind, MetaItemKind, NodeId, StmtKind,
 };
 use rustc_attr_parsing as attr;
-use rustc_data_structures::sync::Lrc;
 use rustc_expand::base::ResolverExpand;
 use rustc_expand::expand::AstFragment;
 use rustc_hir::def::{self, *};
@@ -179,7 +179,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx>
 LoadedMacro::MacroDef { def, ident, attrs, span, edition } => {
 self.compile_macro(&def, ident, &attrs, span, ast::DUMMY_NODE_ID, edition)
 }
-LoadedMacro::ProcMacro(ext) => MacroData::new(Lrc::new(ext)),
+LoadedMacro::ProcMacro(ext) => MacroData::new(Arc::new(ext)),
 };
 
 self.macro_map.entry(def_id).or_insert(macro_data)
@@ -27,6 +27,7 @@
 use std::cell::{Cell, RefCell};
 use std::collections::BTreeSet;
 use std::fmt;
+use std::sync::Arc;
 
 use diagnostics::{ImportSuggestion, LabelSuggestion, Suggestion};
 use effective_visibilities::EffectiveVisibilitiesVisitor;
@@ -46,7 +47,7 @@ use rustc_ast::{
 use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
 use rustc_data_structures::intern::Interned;
 use rustc_data_structures::steal::Steal;
-use rustc_data_structures::sync::{FreezeReadGuard, Lrc};
+use rustc_data_structures::sync::FreezeReadGuard;
 use rustc_errors::{Applicability, Diag, ErrCode, ErrorGuaranteed};
 use rustc_expand::base::{DeriveResolution, SyntaxExtension, SyntaxExtensionKind};
 use rustc_feature::BUILTIN_ATTRIBUTES;
@@ -995,13 +996,13 @@ struct DeriveData
 }
 
 struct MacroData {
-ext: Lrc<SyntaxExtension>,
+ext: Arc<SyntaxExtension>,
 rule_spans: Vec<(usize, Span)>,
 macro_rules: bool,
 }
 
 impl MacroData {
-fn new(ext: Lrc<SyntaxExtension>) -> MacroData {
+fn new(ext: Arc<SyntaxExtension>) -> MacroData {
 MacroData { ext, rule_spans: Vec::new(), macro_rules: false }
 }
 }
@@ -1110,8 +1111,8 @@ pub struct Resolver<'ra, 'tcx>
 registered_tools: &'tcx RegisteredTools,
 macro_use_prelude: FxHashMap<Symbol, NameBinding<'ra>>,
 macro_map: FxHashMap<DefId, MacroData>,
-dummy_ext_bang: Lrc<SyntaxExtension>,
-dummy_ext_derive: Lrc<SyntaxExtension>,
+dummy_ext_bang: Arc<SyntaxExtension>,
+dummy_ext_derive: Arc<SyntaxExtension>,
 non_macro_attr: MacroData,
 local_macro_def_scopes: FxHashMap<LocalDefId, Module<'ra>>,
 ast_transform_scopes: FxHashMap<LocalExpnId, Module<'ra>>,
@@ -1510,9 +1511,9 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx>
 registered_tools,
 macro_use_prelude: FxHashMap::default(),
 macro_map: FxHashMap::default(),
-dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(edition)),
-dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(edition)),
-non_macro_attr: MacroData::new(Lrc::new(SyntaxExtension::non_macro_attr(edition))),
+dummy_ext_bang: Arc::new(SyntaxExtension::dummy_bang(edition)),
+dummy_ext_derive: Arc::new(SyntaxExtension::dummy_derive(edition)),
+non_macro_attr: MacroData::new(Arc::new(SyntaxExtension::non_macro_attr(edition))),
 invocation_parent_scopes: Default::default(),
 output_macro_rules_scopes: Default::default(),
 macro_rules_scopes: Default::default(),
@@ -1688,11 +1689,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx>
 CStore::from_tcx(self.tcx)
 }
 
-fn dummy_ext(&self, macro_kind: MacroKind) -> Lrc<SyntaxExtension> {
+fn dummy_ext(&self, macro_kind: MacroKind) -> Arc<SyntaxExtension> {
 match macro_kind {
-MacroKind::Bang => Lrc::clone(&self.dummy_ext_bang),
-MacroKind::Derive => Lrc::clone(&self.dummy_ext_derive),
-MacroKind::Attr => Lrc::clone(&self.non_macro_attr.ext),
+MacroKind::Bang => Arc::clone(&self.dummy_ext_bang),
+MacroKind::Derive => Arc::clone(&self.dummy_ext_derive),
+MacroKind::Attr => Arc::clone(&self.non_macro_attr.ext),
 }
 }
 
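The resolver builds its dummy `SyntaxExtension`s once and then serves `Arc::clone`d handles on every lookup, so repeated calls cost a reference-count bump rather than a fresh allocation. A stripped-down sketch of that caching pattern (the `Extension` and `Resolver` types below are simplified stand-ins):

    use std::sync::Arc;

    // Stand-in for `SyntaxExtension`: built once, then only shared.
    struct Extension {
        name: &'static str,
    }

    struct Resolver {
        dummy_bang: Arc<Extension>,
        dummy_derive: Arc<Extension>,
    }

    impl Resolver {
        fn new() -> Self {
            Resolver {
                dummy_bang: Arc::new(Extension { name: "dummy_bang" }),
                dummy_derive: Arc::new(Extension { name: "dummy_derive" }),
            }
        }

        // Each call hands out another handle to the cached value.
        fn dummy_ext(&self, derive: bool) -> Arc<Extension> {
            if derive { Arc::clone(&self.dummy_derive) } else { Arc::clone(&self.dummy_bang) }
        }
    }

    fn main() {
        let resolver = Resolver::new();
        let a = resolver.dummy_ext(false);
        let b = resolver.dummy_ext(false);

        // Same cached extension, no new allocation per call.
        assert!(Arc::ptr_eq(&a, &b));
        assert_eq!(a.name, "dummy_bang");
    }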
@@ -3,6 +3,7 @@

 use std::cell::Cell;
 use std::mem;
+use std::sync::Arc;

 use rustc_ast::attr::AttributeExt;
 use rustc_ast::expand::StrippedCfgItem;

@@ -10,7 +11,6 @@ use rustc_ast::{self as ast, Crate, NodeId, attr};
 use rustc_ast_pretty::pprust;
 use rustc_attr_parsing::StabilityLevel;
 use rustc_data_structures::intern::Interned;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, StashKey};
 use rustc_expand::base::{
     DeriveResolution, Indeterminate, ResolverExpand, SyntaxExtension, SyntaxExtensionKind,

@@ -239,7 +239,7 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
         invoc: &Invocation,
         eager_expansion_root: LocalExpnId,
         force: bool,
-    ) -> Result<Lrc<SyntaxExtension>, Indeterminate> {
+    ) -> Result<Arc<SyntaxExtension>, Indeterminate> {
         let invoc_id = invoc.expansion_data.id;
         let parent_scope = match self.invocation_parent_scopes.get(&invoc_id) {
             Some(parent_scope) => *parent_scope,

@@ -529,7 +529,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         force: bool,
         deleg_impl: Option<LocalDefId>,
         invoc_in_mod_inert_attr: Option<LocalDefId>,
-    ) -> Result<(Lrc<SyntaxExtension>, Res), Indeterminate> {
+    ) -> Result<(Arc<SyntaxExtension>, Res), Indeterminate> {
         let (ext, res) = match self.resolve_macro_or_delegation_path(
             path,
             Some(kind),

@@ -682,7 +682,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         trace: bool,
         force: bool,
         ignore_import: Option<Import<'ra>>,
-    ) -> Result<(Option<Lrc<SyntaxExtension>>, Res), Determinacy> {
+    ) -> Result<(Option<Arc<SyntaxExtension>>, Res), Determinacy> {
         self.resolve_macro_or_delegation_path(
             path,
             kind,

@@ -705,7 +705,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         deleg_impl: Option<LocalDefId>,
         invoc_in_mod_inert_attr: Option<(LocalDefId, NodeId)>,
         ignore_import: Option<Import<'ra>>,
-    ) -> Result<(Option<Lrc<SyntaxExtension>>, Res), Determinacy> {
+    ) -> Result<(Option<Arc<SyntaxExtension>>, Res), Determinacy> {
         let path_span = ast_path.span;
         let mut path = Segment::from_path(ast_path);

@@ -788,11 +788,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
             Some(impl_def_id) => match res {
                 def::Res::Def(DefKind::Trait, def_id) => {
                     let edition = self.tcx.sess.edition();
-                    Some(Lrc::new(SyntaxExtension::glob_delegation(def_id, impl_def_id, edition)))
+                    Some(Arc::new(SyntaxExtension::glob_delegation(def_id, impl_def_id, edition)))
                 }
                 _ => None,
             },
-            None => self.get_macro(res).map(|macro_data| Lrc::clone(&macro_data.ext)),
+            None => self.get_macro(res).map(|macro_data| Arc::clone(&macro_data.ext)),
         };
         Ok((ext, res))
     }

@@ -1130,7 +1130,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
             }
         }

-        MacroData { ext: Lrc::new(ext), rule_spans, macro_rules: macro_def.macro_rules }
+        MacroData { ext: Arc::new(ext), rule_spans, macro_rules: macro_def.macro_rules }
     }

     fn path_accessible(

@@ -2,11 +2,12 @@
 //! It also serves as an input to the parser itself.

 use std::str;
+use std::sync::Arc;

 use rustc_ast::attr::AttrIdGenerator;
 use rustc_ast::node_id::NodeId;
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
-use rustc_data_structures::sync::{AppendOnlyVec, Lock, Lrc};
+use rustc_data_structures::sync::{AppendOnlyVec, Lock};
 use rustc_errors::emitter::{HumanEmitter, SilentEmitter, stderr_destination};
 use rustc_errors::{
     ColorConfig, Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, EmissionGuarantee, MultiSpan,

@@ -214,7 +215,7 @@ pub struct ParseSess {
     /// should be. Useful to avoid bad tokenization when encountering emoji. We group them to
     /// provide a single error per unique incorrect identifier.
     pub bad_unicode_identifiers: Lock<FxIndexMap<Symbol, Vec<Span>>>,
-    source_map: Lrc<SourceMap>,
+    source_map: Arc<SourceMap>,
     pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
     /// Contains the spans of block expressions that could have been incomplete based on the
     /// operation token that followed it, but that the parser cannot identify without further

@@ -239,16 +240,16 @@ impl ParseSess {
     /// Used for testing.
     pub fn new(locale_resources: Vec<&'static str>) -> Self {
         let fallback_bundle = fallback_fluent_bundle(locale_resources, false);
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
         let emitter = Box::new(
             HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle)
-                .sm(Some(Lrc::clone(&sm))),
+                .sm(Some(Arc::clone(&sm))),
         );
         let dcx = DiagCtxt::new(emitter);
         ParseSess::with_dcx(dcx, sm)
     }

-    pub fn with_dcx(dcx: DiagCtxt, source_map: Lrc<SourceMap>) -> Self {
+    pub fn with_dcx(dcx: DiagCtxt, source_map: Arc<SourceMap>) -> Self {
         Self {
             dcx,
             unstable_features: UnstableFeatures::from_environment(None),

@@ -276,7 +277,7 @@ impl ParseSess {
         emit_fatal_diagnostic: bool,
     ) -> Self {
         let fallback_bundle = fallback_fluent_bundle(locale_resources, false);
-        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
         let fatal_emitter =
             Box::new(HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle));
         let dcx = DiagCtxt::new(Box::new(SilentEmitter {

@@ -293,8 +294,8 @@ impl ParseSess {
         &self.source_map
     }

-    pub fn clone_source_map(&self) -> Lrc<SourceMap> {
-        Lrc::clone(&self.source_map)
+    pub fn clone_source_map(&self) -> Arc<SourceMap> {
+        Arc::clone(&self.source_map)
     }

     pub fn buffer_lint(

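The `ParseSess::new` hunk above keeps one source map shared between the emitter and the session object. A small sketch of that ownership layout, using stand-in types rather than the real `SourceMap` and emitter:

```rust
use std::sync::Arc;

// Stand-ins for the rustc types; only the sharing pattern is illustrated.
struct SourceMap;

struct Emitter {
    sm: Arc<SourceMap>,
}

struct Sess {
    source_map: Arc<SourceMap>,
    emitter: Emitter,
}

fn build() -> Sess {
    let sm = Arc::new(SourceMap);
    // The emitter keeps its own handle; cloning the Arc copies a pointer,
    // not the map itself.
    let emitter = Emitter { sm: Arc::clone(&sm) };
    Sess { source_map: sm, emitter }
}

fn main() {
    let sess = build();
    assert!(Arc::ptr_eq(&sess.source_map, &sess.emitter.sm));
}
```
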
@@ -9,9 +9,7 @@ use std::{env, fmt, io};
 use rustc_data_structures::flock;
 use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_data_structures::profiling::{SelfProfiler, SelfProfilerRef};
-use rustc_data_structures::sync::{
-    DynSend, DynSync, Lock, Lrc, MappedReadGuard, ReadGuard, RwLock,
-};
+use rustc_data_structures::sync::{DynSend, DynSync, Lock, MappedReadGuard, ReadGuard, RwLock};
 use rustc_errors::annotate_snippet_emitter_writer::AnnotateSnippetEmitter;
 use rustc_errors::codes::*;
 use rustc_errors::emitter::{

@@ -138,8 +136,8 @@ pub struct Session {
     pub target: Target,
     pub host: Target,
     pub opts: config::Options,
-    pub host_tlib_path: Lrc<SearchPath>,
-    pub target_tlib_path: Lrc<SearchPath>,
+    pub host_tlib_path: Arc<SearchPath>,
+    pub target_tlib_path: Arc<SearchPath>,
     pub psess: ParseSess,
     pub sysroot: PathBuf,
     /// Input, input file path and output file path to this compilation process.

@@ -154,7 +152,7 @@ pub struct Session {
     pub code_stats: CodeStats,

     /// This only ever stores a `LintStore` but we don't want a dependency on that type here.
-    pub lint_store: Option<Lrc<dyn LintStoreMarker>>,
+    pub lint_store: Option<Arc<dyn LintStoreMarker>>,

     /// Cap lint level specified by a driver specifically.
     pub driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,

@@ -881,8 +879,8 @@ impl Session {
 #[allow(rustc::bad_opt_access)]
 fn default_emitter(
     sopts: &config::Options,
-    source_map: Lrc<SourceMap>,
-    bundle: Option<Lrc<FluentBundle>>,
+    source_map: Arc<SourceMap>,
+    bundle: Option<Arc<FluentBundle>>,
     fallback_bundle: LazyFallbackBundle,
 ) -> Box<DynEmitter> {
     let macro_backtrace = sopts.unstable_opts.macro_backtrace;

@@ -966,7 +964,7 @@ pub fn build_session(
     early_dcx: EarlyDiagCtxt,
     sopts: config::Options,
     io: CompilerIO,
-    bundle: Option<Lrc<rustc_errors::FluentBundle>>,
+    bundle: Option<Arc<rustc_errors::FluentBundle>>,
     registry: rustc_errors::registry::Registry,
     fluent_resources: Vec<&'static str>,
     driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,

@@ -1001,7 +999,7 @@ pub fn build_session(
         sopts.unstable_opts.translate_directionality_markers,
     );
     let source_map = rustc_span::source_map::get_source_map().unwrap();
-    let emitter = default_emitter(&sopts, Lrc::clone(&source_map), bundle, fallback_bundle);
+    let emitter = default_emitter(&sopts, Arc::clone(&source_map), bundle, fallback_bundle);

     let mut dcx = DiagCtxt::new(emitter)
         .with_flags(sopts.unstable_opts.dcx_flags(can_emit_warnings))

@@ -1041,13 +1039,13 @@ pub fn build_session(
     let host_triple = config::host_tuple();
     let target_triple = sopts.target_triple.tuple();
-    let host_tlib_path = Lrc::new(SearchPath::from_sysroot_and_triple(&sysroot, host_triple));
+    let host_tlib_path = Arc::new(SearchPath::from_sysroot_and_triple(&sysroot, host_triple));
     let target_tlib_path = if host_triple == target_triple {
         // Use the same `SearchPath` if host and target triple are identical to avoid unnecessary
         // rescanning of the target lib path and an unnecessary allocation.
-        Lrc::clone(&host_tlib_path)
+        Arc::clone(&host_tlib_path)
     } else {
-        Lrc::new(SearchPath::from_sysroot_and_triple(&sysroot, target_triple))
+        Arc::new(SearchPath::from_sysroot_and_triple(&sysroot, target_triple))
     };

     let prof = SelfProfilerRef::new(

@@ -1442,7 +1440,7 @@ fn mk_emitter(output: ErrorOutputType) -> Box<DynEmitter> {
         config::ErrorOutputType::Json { pretty, json_rendered, color_config } => {
             Box::new(JsonEmitter::new(
                 Box::new(io::BufWriter::new(io::stderr())),
-                Some(Lrc::new(SourceMap::new(FilePathMapping::empty()))),
+                Some(Arc::new(SourceMap::new(FilePathMapping::empty()))),
                 fallback_bundle,
                 pretty,
                 json_rendered,

@@ -1,6 +1,5 @@
 use std::ops::Range;
+use std::sync::Arc;
-use rustc_data_structures::sync::Lrc;

 use crate::source_map::SourceMap;
 use crate::{BytePos, Pos, RelativeBytePos, SourceFile, SpanData};

@@ -22,7 +21,7 @@ struct CacheEntry {
     // misses for these rare positions. A line lookup for the position via `SourceMap::lookup_line`
     // after a cache miss will produce the last line number, as desired.
     line: Range<BytePos>,
-    file: Lrc<SourceFile>,
+    file: Arc<SourceFile>,
     file_index: usize,
 }

@@ -30,7 +29,7 @@ impl CacheEntry {
     #[inline]
     fn update(
         &mut self,
-        new_file_and_idx: Option<(Lrc<SourceFile>, usize)>,
+        new_file_and_idx: Option<(Arc<SourceFile>, usize)>,
         pos: BytePos,
         time_stamp: usize,
     ) {

@@ -63,7 +62,7 @@ pub struct CachingSourceMapView<'sm> {
 impl<'sm> CachingSourceMapView<'sm> {
     pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> {
         let files = source_map.files();
-        let first_file = Lrc::clone(&files[0]);
+        let first_file = Arc::clone(&files[0]);
         let entry = CacheEntry {
             time_stamp: 0,
             line_number: 0,

@@ -82,7 +81,7 @@ impl<'sm> CachingSourceMapView<'sm> {
     pub fn byte_pos_to_line_and_col(
         &mut self,
         pos: BytePos,
-    ) -> Option<(Lrc<SourceFile>, usize, RelativeBytePos)> {
+    ) -> Option<(Arc<SourceFile>, usize, RelativeBytePos)> {
         self.time_stamp += 1;

         // Check if the position is in one of the cached lines

@@ -92,7 +91,7 @@ impl<'sm> CachingSourceMapView<'sm> {
             cache_entry.touch(self.time_stamp);

             let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
-            return Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col));
+            return Some((Arc::clone(&cache_entry.file), cache_entry.line_number, col));
         }

         // No cache hit ...

@@ -109,13 +108,13 @@ impl<'sm> CachingSourceMapView<'sm> {
         cache_entry.update(new_file_and_idx, pos, self.time_stamp);

         let col = RelativeBytePos(pos.to_u32() - cache_entry.line.start.to_u32());
-        Some((Lrc::clone(&cache_entry.file), cache_entry.line_number, col))
+        Some((Arc::clone(&cache_entry.file), cache_entry.line_number, col))
     }

     pub fn span_data_to_lines_and_cols(
         &mut self,
         span_data: &SpanData,
-    ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)> {
+    ) -> Option<(Arc<SourceFile>, usize, BytePos, usize, BytePos)> {
         self.time_stamp += 1;

         // Check if lo and hi are in the cached lines.

@@ -133,7 +132,7 @@ impl<'sm> CachingSourceMapView<'sm> {
             }

             (
-                Lrc::clone(&lo.file),
+                Arc::clone(&lo.file),
                 lo.line_number,
                 span_data.lo - lo.line.start,
                 hi.line_number,

@@ -181,7 +180,7 @@ impl<'sm> CachingSourceMapView<'sm> {
             lo.update(new_file_and_idx, span_data.lo, self.time_stamp);

             if !lo.line.contains(&span_data.hi) {
-                let new_file_and_idx = Some((Lrc::clone(&lo.file), lo.file_index));
+                let new_file_and_idx = Some((Arc::clone(&lo.file), lo.file_index));
                 let next_oldest = self.oldest_cache_entry_index_avoid(oldest);
                 let hi = &mut self.line_cache[next_oldest];
                 hi.update(new_file_and_idx, span_data.hi, self.time_stamp);

@@ -227,7 +226,7 @@ impl<'sm> CachingSourceMapView<'sm> {
         assert_eq!(lo.file_index, hi.file_index);

         Some((
-            Lrc::clone(&lo.file),
+            Arc::clone(&lo.file),
            lo.line_number,
             span_data.lo - lo.line.start,
             hi.line_number,

@@ -271,13 +270,13 @@ impl<'sm> CachingSourceMapView<'sm> {
         oldest
     }

-    fn file_for_position(&self, pos: BytePos) -> Option<(Lrc<SourceFile>, usize)> {
+    fn file_for_position(&self, pos: BytePos) -> Option<(Arc<SourceFile>, usize)> {
         if !self.source_map.files().is_empty() {
             let file_idx = self.source_map.lookup_source_file_idx(pos);
             let file = &self.source_map.files()[file_idx];

             if file_contains(file, pos) {
-                return Some((Lrc::clone(file), file_idx));
+                return Some((Arc::clone(file), file_idx));
             }
         }

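The cache-hit paths above return `Arc::clone(&entry.file)` instead of re-querying the `SourceMap`. A compact sketch of that lookup, with `File` standing in for `SourceFile`:

```rust
use std::ops::Range;
use std::sync::Arc;

// Stand-in for SourceFile; each cache entry owns a shared handle to it.
struct File;

struct Entry {
    file: Arc<File>,
    line: Range<u32>, // byte range covered by the cached line
}

fn lookup(entries: &[Entry], pos: u32) -> Option<(Arc<File>, u32)> {
    let entry = entries.iter().find(|e| e.line.contains(&pos))?;
    // Share the file handle and return the column as an offset into the line.
    Some((Arc::clone(&entry.file), pos - entry.line.start))
}

fn main() {
    let file = Arc::new(File);
    let entries = vec![Entry { file: Arc::clone(&file), line: 10..20 }];
    assert!(lookup(&entries, 15).is_some());
    assert!(lookup(&entries, 5).is_none());
}
```
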
@@ -29,11 +29,12 @@ use std::collections::hash_map::Entry;
 use std::collections::hash_set::Entry as SetEntry;
 use std::fmt;
 use std::hash::Hash;
+use std::sync::Arc;

 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::stable_hasher::{Hash64, HashStable, HashingControls, StableHasher};
-use rustc_data_structures::sync::{Lock, Lrc, WorkerLocal};
+use rustc_data_structures::sync::{Lock, WorkerLocal};
 use rustc_data_structures::unhash::UnhashMap;
 use rustc_index::IndexVec;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};

@@ -904,7 +905,7 @@ impl Span {
     /// allowed inside this span.
     pub fn mark_with_reason(
         self,
-        allow_internal_unstable: Option<Lrc<[Symbol]>>,
+        allow_internal_unstable: Option<Arc<[Symbol]>>,
         reason: DesugaringKind,
         edition: Edition,
         ctx: impl HashStableContext,

@@ -959,7 +960,7 @@ pub struct ExpnData {
     /// List of `#[unstable]`/feature-gated features that the macro is allowed to use
     /// internally without forcing the whole crate to opt-in
     /// to them.
-    pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
+    pub allow_internal_unstable: Option<Arc<[Symbol]>>,
     /// Edition of the crate in which the macro is defined.
     pub edition: Edition,
     /// The `DefId` of the macro being invoked,

@@ -985,7 +986,7 @@ impl ExpnData {
         parent: ExpnId,
         call_site: Span,
         def_site: Span,
-        allow_internal_unstable: Option<Lrc<[Symbol]>>,
+        allow_internal_unstable: Option<Arc<[Symbol]>>,
         edition: Edition,
         macro_def_id: Option<DefId>,
         parent_module: Option<DefId>,

@@ -1037,7 +1038,7 @@ impl ExpnData {
         kind: ExpnKind,
         call_site: Span,
         edition: Edition,
-        allow_internal_unstable: Lrc<[Symbol]>,
+        allow_internal_unstable: Arc<[Symbol]>,
         macro_def_id: Option<DefId>,
         parent_module: Option<DefId>,
     ) -> ExpnData {

@@ -83,11 +83,12 @@ use std::io::{self, Read};
 use std::ops::{Add, Range, Sub};
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
+use std::sync::Arc;
 use std::{fmt, iter};

 use md5::{Digest, Md5};
 use rustc_data_structures::stable_hasher::{Hash64, Hash128, HashStable, StableHasher};
-use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock, Lrc};
+use rustc_data_structures::sync::{FreezeLock, FreezeWriteGuard, Lock};
 use rustc_data_structures::unord::UnordMap;
 use sha1::Sha1;
 use sha2::Sha256;

@@ -110,7 +111,7 @@ pub struct SessionGlobals {
     /// The session's source map, if there is one. This field should only be
     /// used in places where the `Session` is truly not available, such as
     /// `<Span as Debug>::fmt`.
-    source_map: Option<Lrc<SourceMap>>,
+    source_map: Option<Arc<SourceMap>>,
 }

 impl SessionGlobals {

@@ -120,7 +121,7 @@ impl SessionGlobals {
             span_interner: Lock::new(span_encoding::SpanInterner::default()),
             metavar_spans: Default::default(),
             hygiene_data: Lock::new(hygiene::HygieneData::new(edition)),
-            source_map: sm_inputs.map(|inputs| Lrc::new(SourceMap::with_inputs(inputs))),
+            source_map: sm_inputs.map(|inputs| Arc::new(SourceMap::with_inputs(inputs))),
         }
     }
 }

@@ -1430,7 +1431,7 @@ pub enum ExternalSource {
 #[derive(PartialEq, Eq, Clone, Debug)]
 pub enum ExternalSourceKind {
     /// The external source has been loaded already.
-    Present(Lrc<String>),
+    Present(Arc<String>),
     /// No attempt has been made to load the external source.
     AbsentOk,
     /// A failed attempt has been made to load the external source.

@@ -1670,7 +1671,7 @@ pub struct SourceFile {
     /// (e.g., `<anon>`).
     pub name: FileName,
     /// The complete source code.
-    pub src: Option<Lrc<String>>,
+    pub src: Option<Arc<String>>,
     /// The source code's hash.
     pub src_hash: SourceFileHash,
     /// Used to enable cargo to use checksums to check if a crate is fresh rather

@@ -1931,7 +1932,7 @@ impl SourceFile {

         Ok(SourceFile {
             name,
-            src: Some(Lrc::new(src)),
+            src: Some(Arc::new(src)),
             src_hash,
             checksum_hash,
             external_src: FreezeLock::frozen(ExternalSource::Unneeded),

@@ -2050,7 +2051,7 @@ impl SourceFile {
         } = &mut *external_src
         {
             *src_kind = if let Some(src) = src {
-                ExternalSourceKind::Present(Lrc::new(src))
+                ExternalSourceKind::Present(Arc::new(src))
             } else {
                 ExternalSourceKind::AbsentErr
             };

@@ -2490,7 +2491,7 @@ impl<D: Decoder> Decodable<D> for RelativeBytePos {
 #[derive(Debug, Clone)]
 pub struct Loc {
     /// Information about the original source.
-    pub file: Lrc<SourceFile>,
+    pub file: Arc<SourceFile>,
     /// The (1-based) line number.
     pub line: usize,
     /// The (0-based) column offset.

@@ -2502,13 +2503,13 @@ pub struct Loc {
 // Used to be structural records.
 #[derive(Debug)]
 pub struct SourceFileAndLine {
-    pub sf: Lrc<SourceFile>,
+    pub sf: Arc<SourceFile>,
     /// Index of line, starting from 0.
     pub line: usize,
 }
 #[derive(Debug)]
 pub struct SourceFileAndBytePos {
-    pub sf: Lrc<SourceFile>,
+    pub sf: Arc<SourceFile>,
     pub pos: BytePos,
 }

@@ -2525,7 +2526,7 @@ pub struct LineInfo {
 }

 pub struct FileLines {
-    pub file: Lrc<SourceFile>,
+    pub file: Arc<SourceFile>,
     pub lines: Vec<LineInfo>,
 }

@@ -2591,7 +2592,7 @@ pub trait HashStableContext {
     fn span_data_to_lines_and_cols(
         &mut self,
         span: &SpanData,
-    ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)>;
+    ) -> Option<(Arc<SourceFile>, usize, BytePos, usize, BytePos)>;
     fn hashing_controls(&self) -> HashingControls;
 }

@@ -102,8 +102,8 @@ pub trait FileLoader {
     fn read_file(&self, path: &Path) -> io::Result<String>;

     /// Read the contents of a potentially non-UTF-8 file into memory.
-    /// We don't normalize binary files, so we can start in an Lrc.
+    /// We don't normalize binary files, so we can start in an Arc.
-    fn read_binary_file(&self, path: &Path) -> io::Result<Lrc<[u8]>>;
+    fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>>;
 }

 /// A FileLoader that uses std::fs to load real files.

@@ -124,12 +124,12 @@ impl FileLoader for RealFileLoader {
         fs::read_to_string(path)
     }

-    fn read_binary_file(&self, path: &Path) -> io::Result<Lrc<[u8]>> {
+    fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>> {
         let mut file = fs::File::open(path)?;
         let len = file.metadata()?.len();

-        let mut bytes = Lrc::new_uninit_slice(len as usize);
+        let mut bytes = Arc::new_uninit_slice(len as usize);
-        let mut buf = BorrowedBuf::from(Lrc::get_mut(&mut bytes).unwrap());
+        let mut buf = BorrowedBuf::from(Arc::get_mut(&mut bytes).unwrap());
         match file.read_buf_exact(buf.unfilled()) {
             Ok(()) => {}
             Err(e) if e.kind() == io::ErrorKind::UnexpectedEof => {

@@ -146,9 +146,9 @@ impl FileLoader for RealFileLoader {
         // But we are not guaranteed to be at the end of the file, because we did not attempt to do
         // a read with a non-zero-sized buffer and get Ok(0).
         // So we do small read to a fixed-size buffer. If the read returns no bytes then we're
-        // already done, and we just return the Lrc we built above.
+        // already done, and we just return the Arc we built above.
         // If the read returns bytes however, we just fall back to reading into a Vec then turning
-        // that into an Lrc, losing our nice peak memory behavior. This fallback code path should
+        // that into an Arc, losing our nice peak memory behavior. This fallback code path should
         // be rarely exercised.

         let mut probe = [0u8; 32];

@@ -172,8 +172,8 @@ impl FileLoader for RealFileLoader {

 #[derive(Default)]
 struct SourceMapFiles {
-    source_files: monotonic::MonotonicVec<Lrc<SourceFile>>,
+    source_files: monotonic::MonotonicVec<Arc<SourceFile>>,
-    stable_id_to_source_file: UnhashMap<StableSourceFileId, Lrc<SourceFile>>,
+    stable_id_to_source_file: UnhashMap<StableSourceFileId, Arc<SourceFile>>,
 }

 /// Used to construct a `SourceMap` with `SourceMap::with_inputs`.

@@ -232,7 +232,7 @@ impl SourceMap {
         self.file_loader.file_exists(path)
     }

-    pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
+    pub fn load_file(&self, path: &Path) -> io::Result<Arc<SourceFile>> {
         let src = self.file_loader.read_file(path)?;
         let filename = path.to_owned().into();
         Ok(self.new_source_file(filename, src))

@@ -242,7 +242,7 @@ impl SourceMap {
     ///
     /// Unlike `load_file`, guarantees that no normalization like BOM-removal
     /// takes place.
-    pub fn load_binary_file(&self, path: &Path) -> io::Result<(Lrc<[u8]>, Span)> {
+    pub fn load_binary_file(&self, path: &Path) -> io::Result<(Arc<[u8]>, Span)> {
         let bytes = self.file_loader.read_binary_file(path)?;

         // We need to add file to the `SourceMap`, so that it is present

@@ -265,14 +265,14 @@ impl SourceMap {

     // By returning a `MonotonicVec`, we ensure that consumers cannot invalidate
     // any existing indices pointing into `files`.
-    pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Lrc<SourceFile>>> {
+    pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Arc<SourceFile>>> {
         ReadGuard::map(self.files.borrow(), |files| &files.source_files)
     }

     pub fn source_file_by_stable_id(
         &self,
         stable_id: StableSourceFileId,
-    ) -> Option<Lrc<SourceFile>> {
+    ) -> Option<Arc<SourceFile>> {
         self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned()
     }

@@ -280,7 +280,7 @@ impl SourceMap {
         &self,
         file_id: StableSourceFileId,
         mut file: SourceFile,
-    ) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
+    ) -> Result<Arc<SourceFile>, OffsetOverflowError> {
         let mut files = self.files.borrow_mut();

         file.start_pos = BytePos(if let Some(last_file) = files.source_files.last() {

@@ -291,9 +291,9 @@ impl SourceMap {
             0
         });

-        let file = Lrc::new(file);
+        let file = Arc::new(file);
-        files.source_files.push(Lrc::clone(&file));
+        files.source_files.push(Arc::clone(&file));
-        files.stable_id_to_source_file.insert(file_id, Lrc::clone(&file));
+        files.stable_id_to_source_file.insert(file_id, Arc::clone(&file));

         Ok(file)
     }

@@ -301,7 +301,7 @@ impl SourceMap {
     /// Creates a new `SourceFile`.
     /// If a file already exists in the `SourceMap` with the same ID, that file is returned
     /// unmodified.
-    pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
+    pub fn new_source_file(&self, filename: FileName, src: String) -> Arc<SourceFile> {
         self.try_new_source_file(filename, src).unwrap_or_else(|OffsetOverflowError| {
             eprintln!(
                 "fatal error: rustc does not support text files larger than {} bytes",

@@ -315,7 +315,7 @@ impl SourceMap {
         &self,
         filename: FileName,
         src: String,
-    ) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
+    ) -> Result<Arc<SourceFile>, OffsetOverflowError> {
         // Note that filename may not be a valid path, eg it may be `<anon>` etc,
         // but this is okay because the directory determined by `path.pop()` will
         // be empty, so the working directory will be used.

@@ -353,7 +353,7 @@ impl SourceMap {
         multibyte_chars: Vec<MultiByteChar>,
         normalized_pos: Vec<NormalizedPos>,
         metadata_index: u32,
-    ) -> Lrc<SourceFile> {
+    ) -> Arc<SourceFile> {
         let source_len = RelativeBytePos::from_u32(source_len);

         let source_file = SourceFile {

@@ -393,9 +393,9 @@ impl SourceMap {
     }

     /// Return the SourceFile that contains the given `BytePos`
-    pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> {
+    pub fn lookup_source_file(&self, pos: BytePos) -> Arc<SourceFile> {
         let idx = self.lookup_source_file_idx(pos);
-        Lrc::clone(&(*self.files.borrow().source_files)[idx])
+        Arc::clone(&(*self.files.borrow().source_files)[idx])
     }

     /// Looks up source information about a `BytePos`.

@@ -406,7 +406,7 @@ impl SourceMap {
     }

     /// If the corresponding `SourceFile` is empty, does not return a line number.
-    pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
+    pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Arc<SourceFile>> {
         let f = self.lookup_source_file(pos);

         let pos = f.relative_position(pos);

@@ -441,7 +441,7 @@ impl SourceMap {
     pub fn span_to_location_info(
         &self,
         sp: Span,
-    ) -> (Option<Lrc<SourceFile>>, usize, usize, usize, usize) {
+    ) -> (Option<Arc<SourceFile>>, usize, usize, usize, usize) {
         if self.files.borrow().source_files.is_empty() || sp.is_dummy() {
             return (None, 0, 0, 0, 0);
         }

@@ -477,7 +477,7 @@ impl SourceMap {
         if lo != hi {
             return true;
         }
-        let f = Lrc::clone(&(*self.files.borrow().source_files)[lo]);
+        let f = Arc::clone(&(*self.files.borrow().source_files)[lo]);
         let lo = f.relative_position(sp.lo());
         let hi = f.relative_position(sp.hi());
         f.lookup_line(lo) != f.lookup_line(hi)

@@ -998,12 +998,12 @@ impl SourceMap {
         }
     }

-    pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
+    pub fn get_source_file(&self, filename: &FileName) -> Option<Arc<SourceFile>> {
         // Remap filename before lookup
         let filename = self.path_mapping().map_filename_prefix(filename).0;
         for sf in self.files.borrow().source_files.iter() {
             if filename == sf.name {
-                return Some(Lrc::clone(&sf));
+                return Some(Arc::clone(&sf));
             }
         }
         None

@@ -1012,7 +1012,7 @@ impl SourceMap {
     /// For a global `BytePos`, computes the local offset within the containing `SourceFile`.
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
         let idx = self.lookup_source_file_idx(bpos);
-        let sf = Lrc::clone(&(*self.files.borrow().source_files)[idx]);
+        let sf = Arc::clone(&(*self.files.borrow().source_files)[idx]);
         let offset = bpos - sf.start_pos;
         SourceFileAndBytePos { sf, pos: offset }
     }

@@ -1082,7 +1082,7 @@ impl SourceMap {
     }
 }

-pub fn get_source_map() -> Option<Lrc<SourceMap>> {
+pub fn get_source_map() -> Option<Arc<SourceMap>> {
     with_session_globals(|session_globals| session_globals.source_map.clone())
 }

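The `read_binary_file` hunks above keep the single-copy read into an `Arc<[u8]>`. A simplified, stable-Rust sketch of the same idea; the real code uses the unstable `BorrowedBuf` API instead of the zero-fill below, and additionally probes for extra bytes in case the reported length was wrong:

```rust
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;
use std::sync::Arc;

// Allocate the final Arc<[u8]> up front and read into it in place, so the file
// contents never live in two buffers at once.
fn read_into_arc(path: &Path) -> io::Result<Arc<[u8]>> {
    let mut file = File::open(path)?;
    let len = file.metadata()?.len() as usize;

    // `Arc::new_uninit_slice` and `assume_init` are stable since Rust 1.82.
    let mut uninit = Arc::new_uninit_slice(len);
    for byte in Arc::get_mut(&mut uninit).unwrap() {
        byte.write(0); // zero-fill so the buffer is fully initialized
    }
    // SAFETY: every element was initialized by the loop above.
    let mut bytes: Arc<[u8]> = unsafe { uninit.assume_init() };

    // We still hold the only reference, so `get_mut` succeeds and the read
    // goes straight into the Arc's own allocation.
    file.read_exact(Arc::get_mut(&mut bytes).unwrap())?;
    Ok(bytes)
}
```
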
@@ -538,7 +538,7 @@ fn test_next_point() {
 #[cfg(target_os = "linux")]
 #[test]
 fn read_binary_file_handles_lying_stat() {
-    // read_binary_file tries to read the contents of a file into an Lrc<[u8]> while
+    // read_binary_file tries to read the contents of a file into an Arc<[u8]> while
     // never having two copies of the data in memory at once. This is an optimization
     // to support include_bytes! with large files. But since Rust allocators are
     // sensitive to alignment, our implementation can't be bootstrapped off calling

@@ -12,13 +12,11 @@ mod delayed_map;
 mod impl_ {
     pub use rustc_data_structures::sso::{SsoHashMap, SsoHashSet};
     pub use rustc_data_structures::stack::ensure_sufficient_stack;
-    pub use rustc_data_structures::sync::Lrc;
 }

 #[cfg(not(feature = "nightly"))]
 mod impl_ {
     pub use std::collections::{HashMap as SsoHashMap, HashSet as SsoHashSet};
-    pub use std::sync::Arc as Lrc;

     #[inline]
     pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {

@@ -46,12 +46,12 @@
 //! ```

 use std::mem;
+use std::sync::Arc;

 use rustc_index::{Idx, IndexVec};
 use thin_vec::ThinVec;
 use tracing::{debug, instrument};

-use crate::data_structures::Lrc;
 use crate::inherent::*;
 use crate::visit::{TypeVisitable, TypeVisitableExt as _};
 use crate::{self as ty, Interner};

@@ -273,28 +273,28 @@ impl<I: Interner, T: TypeFoldable<I>, E: TypeFoldable<I>> TypeFoldable<I> for Re
     }
 }

-impl<I: Interner, T: TypeFoldable<I>> TypeFoldable<I> for Lrc<T> {
+impl<I: Interner, T: TypeFoldable<I>> TypeFoldable<I> for Arc<T> {
     fn try_fold_with<F: FallibleTypeFolder<I>>(mut self, folder: &mut F) -> Result<Self, F::Error> {
         // We merely want to replace the contained `T`, if at all possible,
-        // so that we don't needlessly allocate a new `Lrc` or indeed clone
+        // so that we don't needlessly allocate a new `Arc` or indeed clone
         // the contained type.
         unsafe {
             // First step is to ensure that we have a unique reference to
-            // the contained type, which `Lrc::make_mut` will accomplish (by
-            // allocating a new `Lrc` and cloning the `T` only if required).
-            // This is done *before* casting to `Lrc<ManuallyDrop<T>>` so that
+            // the contained type, which `Arc::make_mut` will accomplish (by
+            // allocating a new `Arc` and cloning the `T` only if required).
+            // This is done *before* casting to `Arc<ManuallyDrop<T>>` so that
             // panicking during `make_mut` does not leak the `T`.
-            Lrc::make_mut(&mut self);
+            Arc::make_mut(&mut self);

-            // Casting to `Lrc<ManuallyDrop<T>>` is safe because `ManuallyDrop`
+            // Casting to `Arc<ManuallyDrop<T>>` is safe because `ManuallyDrop`
             // is `repr(transparent)`.
-            let ptr = Lrc::into_raw(self).cast::<mem::ManuallyDrop<T>>();
-            let mut unique = Lrc::from_raw(ptr);
+            let ptr = Arc::into_raw(self).cast::<mem::ManuallyDrop<T>>();
+            let mut unique = Arc::from_raw(ptr);

-            // Call to `Lrc::make_mut` above guarantees that `unique` is the
+            // Call to `Arc::make_mut` above guarantees that `unique` is the
             // sole reference to the contained value, so we can avoid doing
             // a checked `get_mut` here.
-            let slot = Lrc::get_mut(&mut unique).unwrap_unchecked();
+            let slot = Arc::get_mut(&mut unique).unwrap_unchecked();

             // Semantically move the contained type out from `unique`, fold
             // it, then move the folded value back into `unique`. Should

@@ -304,8 +304,8 @@ impl<I: Interner, T: TypeFoldable<I>> TypeFoldable<I> for Lrc<T> {
             let folded = owned.try_fold_with(folder)?;
             *slot = mem::ManuallyDrop::new(folded);

-            // Cast back to `Lrc<T>`.
-            Ok(Lrc::from_raw(Lrc::into_raw(unique).cast()))
+            // Cast back to `Arc<T>`.
+            Ok(Arc::from_raw(Arc::into_raw(unique).cast()))
         }
     }
 }

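The `TypeFoldable for Arc<T>` impl above rewrites the payload in place by moving it through `ManuallyDrop` and raw-pointer casts. A safe sketch of the ownership steps it performs; this version still re-wraps the value in a fresh `Arc` and needs `T: Clone`, both of which the real unsafe code avoids:

```rust
use std::sync::Arc;

// `fold` stands in for `try_fold_with`; the goal is "rewrite the T inside an
// Arc<T>, cloning the payload only when the Arc is actually shared".
fn fold_arc<T: Clone>(mut arc: Arc<T>, fold: impl FnOnce(T) -> T) -> Arc<T> {
    // After `make_mut` we hold the only strong reference: it clones the payload
    // only if it was shared, and detaches any weak references.
    Arc::make_mut(&mut arc);
    match Arc::try_unwrap(arc) {
        // Unique: move the value out, fold it, wrap it again.
        Ok(value) => Arc::new(fold(value)),
        // Defensive fallback; `make_mut` should have made this unreachable.
        Err(shared) => Arc::new(fold((*shared).clone())),
    }
}

fn main() {
    let folded = fold_arc(Arc::new(41), |n| n + 1);
    assert_eq!(*folded, 42);
}
```
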
@@ -43,6 +43,7 @@

 use std::fmt;
 use std::ops::ControlFlow;
+use std::sync::Arc;

 use rustc_ast_ir::visit::VisitorResult;
 use rustc_ast_ir::{try_visit, walk_visitable_list};

@@ -50,7 +51,6 @@ use rustc_index::{Idx, IndexVec};
 use smallvec::SmallVec;
 use thin_vec::ThinVec;

-use crate::data_structures::Lrc;
 use crate::inherent::*;
 use crate::{self as ty, Interner, TypeFlags};

@@ -167,7 +167,7 @@ impl<I: Interner, T: TypeVisitable<I>, E: TypeVisitable<I>> TypeVisitable<I> for
     }
 }

-impl<I: Interner, T: TypeVisitable<I>> TypeVisitable<I> for Lrc<T> {
+impl<I: Interner, T: TypeVisitable<I>> TypeVisitable<I> for Arc<T> {
     fn visit_with<V: TypeVisitor<I>>(&self, visitor: &mut V) -> V::Result {
         (**self).visit_with(visitor)
     }

@@ -15,9 +15,9 @@ extern crate rustc_span;

 use std::io;
 use std::path::Path;
+use std::sync::Arc;

 use rustc_ast_pretty::pprust::item_to_string;
-use rustc_data_structures::sync::Lrc;
 use rustc_driver::{Compilation, run_compiler};
 use rustc_interface::interface::{Compiler, Config};
 use rustc_middle::ty::TyCtxt;

@@ -43,7 +43,7 @@ fn main() {
         }
     }

-    fn read_binary_file(&self, _path: &Path) -> io::Result<Lrc<[u8]>> {
+    fn read_binary_file(&self, _path: &Path) -> io::Result<Arc<[u8]>> {
         Err(io::Error::other("oops"))
     }
 }

@@ -15,9 +15,9 @@ extern crate rustc_span;

 use std::io;
 use std::path::Path;
+use std::sync::Arc;

 use rustc_ast_pretty::pprust::item_to_string;
-use rustc_data_structures::sync::Lrc;
 use rustc_driver::{Compilation, run_compiler};
 use rustc_interface::interface::{Compiler, Config};
 use rustc_middle::ty::TyCtxt;

@@ -43,7 +43,7 @@ fn main() {
         }
     }

-    fn read_binary_file(&self, _path: &Path) -> io::Result<Lrc<[u8]>> {
+    fn read_binary_file(&self, _path: &Path) -> io::Result<Arc<[u8]>> {
         Err(io::Error::other("oops"))
     }
 }

@@ -54,7 +54,7 @@ Lints are registered via the [`LintStore::register_lint`] function. This should
 happen just once for any lint, or an ICE will occur.

 Once the registration is complete, we "freeze" the lint store by placing it in
-an `Lrc`.
+an `Arc`.

 Lint passes are registered separately into one of the categories
 (pre-expansion, early, late, late module). Passes are registered as a closure

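For the guide text above, a tiny illustrative sketch of the freeze step; `LintStore` here is a stand-in struct, not the real rustc type:

```rust
use std::sync::Arc;

// Stand-in for the lint store described in the guide.
#[derive(Default)]
struct LintStore {
    lint_names: Vec<&'static str>,
}

fn main() {
    // Registration needs exclusive (&mut) access...
    let mut store = LintStore::default();
    store.lint_names.push("unused_variables");

    // ...freezing moves it behind an Arc: from here on it can be shared
    // (including across threads) but only read.
    let frozen: Arc<LintStore> = Arc::new(store);
    assert_eq!(frozen.lint_names.len(), 1);
}
```
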
@@ -46,7 +46,6 @@ are implemented differently depending on whether `parallel-compiler` is true.

 | data structure                   | parallel                                            | non-parallel                          |
 | -------------------------------- | --------------------------------------------------- | ------------------------------------- |
-| Lrc                              | std::sync::Arc                                      | std::rc::Rc                           |
 | Weak                             | std::sync::Weak                                     | std::rc::Weak                         |
 | Atomic{Bool}/{Usize}/{U32}/{U64} | std::sync::atomic::Atomic{Bool}/{Usize}/{U32}/{U64} | (std::cell::Cell<bool/usize/u32/u64>) |
 | OnceCell                         | std::sync::OnceLock                                 | std::cell::OnceCell                   |

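The rows above describe names that resolve to different standard-library types depending on the build. A sketch of how such a `cfg`-selected alias can be written, mirroring the `Weak` row; the `parallel_compiler` cfg name is used illustratively and this is not the exact `rustc_data_structures` source:

```rust
// In a parallel build the name resolves to the thread-safe type, otherwise to
// the cheaper single-threaded one; downstream code just says `Weak`.
#[cfg(parallel_compiler)]
pub use std::sync::Weak;
#[cfg(not(parallel_compiler))]
pub use std::rc::Weak;

fn main() {
    // Compiles either way: both Weak types have an identical `new` constructor.
    let _dangling: Weak<i32> = Weak::new();
}
```
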
@@ -1,8 +1,7 @@
-use std::sync::LazyLock;
+use std::sync::{Arc, LazyLock};
 use std::{io, mem};

 use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
-use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::unord::UnordSet;
 use rustc_driver::USING_INTERNAL_FEATURES;
 use rustc_errors::TerminalUrl;

@@ -145,7 +144,7 @@ impl<'tcx> DocContext<'tcx> {
 /// will be created for the `DiagCtxt`.
 pub(crate) fn new_dcx(
     error_format: ErrorOutputType,
-    source_map: Option<Lrc<source_map::SourceMap>>,
+    source_map: Option<Arc<source_map::SourceMap>>,
     diagnostic_width: Option<usize>,
     unstable_opts: &UnstableOptions,
 ) -> rustc_errors::DiagCtxt {

@@ -173,7 +172,7 @@ pub(crate) fn new_dcx(
         }
         ErrorOutputType::Json { pretty, json_rendered, color_config } => {
             let source_map = source_map.unwrap_or_else(|| {
-                Lrc::new(source_map::SourceMap::new(source_map::FilePathMapping::empty()))
+                Arc::new(source_map::SourceMap::new(source_map::FilePathMapping::empty()))
             });
             Box::new(
                 JsonEmitter::new(

@ -2,9 +2,9 @@
|
||||||
//! runnable, e.g. by adding a `main` function if it doesn't already exist.
|
//! runnable, e.g. by adding a `main` function if it doesn't already exist.
|
||||||
|
|
||||||
use std::io;
|
use std::io;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use rustc_ast as ast;
|
use rustc_ast as ast;
|
||||||
use rustc_data_structures::sync::Lrc;
|
|
||||||
use rustc_errors::emitter::stderr_destination;
|
use rustc_errors::emitter::stderr_destination;
|
||||||
use rustc_errors::{ColorConfig, FatalError};
|
use rustc_errors::{ColorConfig, FatalError};
|
||||||
use rustc_parse::new_parser_from_source_str;
|
use rustc_parse::new_parser_from_source_str;
|
||||||
|
@ -280,7 +280,7 @@ fn parse_source(
|
||||||
|
|
||||||
// Any errors in parsing should also appear when the doctest is compiled for real, so just
|
// Any errors in parsing should also appear when the doctest is compiled for real, so just
|
||||||
// send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
|
// send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
|
||||||
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
|
||||||
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
|
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
|
||||||
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
|
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
|
||||||
false,
|
false,
|
||||||
|
@ -474,7 +474,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> Option<AttrKind>
|
||||||
let filename = FileName::anon_source_code(source);
|
let filename = FileName::anon_source_code(source);
|
||||||
// Any errors in parsing should also appear when the doctest is compiled for real, so just
|
// Any errors in parsing should also appear when the doctest is compiled for real, so just
|
||||||
// send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
|
// send all the errors that librustc_ast emits directly into a `Sink` instead of stderr.
|
||||||
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
|
||||||
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
|
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
|
||||||
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
|
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
|
||||||
false,
|
false,
|
||||||
|
|
|
@ -1,9 +1,9 @@
|
||||||
//! Doctest functionality used only for doctests in `.rs` source files.
|
//! Doctest functionality used only for doctests in `.rs` source files.
|
||||||
|
|
||||||
use std::env;
|
use std::env;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
use rustc_data_structures::fx::FxHashSet;
|
use rustc_data_structures::fx::FxHashSet;
|
||||||
use rustc_data_structures::sync::Lrc;
|
|
||||||
use rustc_hir::def_id::{CRATE_DEF_ID, LocalDefId};
|
use rustc_hir::def_id::{CRATE_DEF_ID, LocalDefId};
|
||||||
use rustc_hir::{self as hir, CRATE_HIR_ID, intravisit};
|
use rustc_hir::{self as hir, CRATE_HIR_ID, intravisit};
|
||||||
use rustc_middle::hir::nested_filter;
|
use rustc_middle::hir::nested_filter;
|
||||||
|
@ -17,7 +17,7 @@ use crate::clean::{Attributes, extract_cfg_from_attrs};
|
||||||
use crate::html::markdown::{self, ErrorCodes, LangString, MdRelLine};
|
use crate::html::markdown::{self, ErrorCodes, LangString, MdRelLine};
|
||||||
|
|
||||||
struct RustCollector {
|
struct RustCollector {
|
||||||
source_map: Lrc<SourceMap>,
|
source_map: Arc<SourceMap>,
|
||||||
tests: Vec<ScrapedDocTest>,
|
tests: Vec<ScrapedDocTest>,
|
||||||
cur_path: Vec<String>,
|
cur_path: Vec<String>,
|
||||||
position: Span,
|
position: Span,
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
//! Validates syntax inside Rust code blocks (\`\`\`rust).
|
//! Validates syntax inside Rust code blocks (\`\`\`rust).
|
||||||
|
|
||||||
use rustc_data_structures::sync::{Lock, Lrc};
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use rustc_data_structures::sync::Lock;
|
||||||
use rustc_errors::emitter::Emitter;
|
use rustc_errors::emitter::Emitter;
|
||||||
use rustc_errors::registry::Registry;
|
use rustc_errors::registry::Registry;
|
||||||
use rustc_errors::translation::{Translate, to_fluent_args};
|
use rustc_errors::translation::{Translate, to_fluent_args};
|
||||||
|
@ -32,14 +34,14 @@ fn check_rust_syntax(
|
||||||
dox: &str,
|
dox: &str,
|
||||||
code_block: RustCodeBlock,
|
code_block: RustCodeBlock,
|
||||||
) {
|
) {
|
||||||
let buffer = Lrc::new(Lock::new(Buffer::default()));
|
let buffer = Arc::new(Lock::new(Buffer::default()));
|
||||||
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
|
let fallback_bundle = rustc_errors::fallback_fluent_bundle(
|
||||||
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
|
rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(),
|
||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
let emitter = BufferEmitter { buffer: Lrc::clone(&buffer), fallback_bundle };
|
let emitter = BufferEmitter { buffer: Arc::clone(&buffer), fallback_bundle };
|
||||||
|
|
||||||
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
|
||||||
let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();
|
let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();
|
||||||
let source = dox[code_block.code].to_owned();
|
let source = dox[code_block.code].to_owned();
|
||||||
let psess = ParseSess::with_dcx(dcx, sm);
|
let psess = ParseSess::with_dcx(dcx, sm);
|
||||||
|
@ -141,7 +143,7 @@ struct Buffer {
|
||||||
}
|
}
|
||||||
|
|
||||||
struct BufferEmitter {
|
struct BufferEmitter {
|
||||||
buffer: Lrc<Lock<Buffer>>,
|
buffer: Arc<Lock<Buffer>>,
|
||||||
fallback_bundle: LazyFallbackBundle,
|
fallback_bundle: LazyFallbackBundle,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
|
use std::sync::Arc;
|
||||||
use super::MIXED_ATTRIBUTES_STYLE;
|
use super::MIXED_ATTRIBUTES_STYLE;
|
||||||
use clippy_utils::diagnostics::span_lint;
|
use clippy_utils::diagnostics::span_lint;
|
||||||
use rustc_ast::{AttrKind, AttrStyle, Attribute};
|
use rustc_ast::{AttrKind, AttrStyle, Attribute};
|
||||||
use rustc_data_structures::fx::FxHashSet;
|
use rustc_data_structures::fx::FxHashSet;
|
||||||
use rustc_data_structures::sync::Lrc;
|
|
||||||
use rustc_lint::{EarlyContext, LintContext};
|
use rustc_lint::{EarlyContext, LintContext};
|
||||||
use rustc_span::source_map::SourceMap;
|
use rustc_span::source_map::SourceMap;
|
||||||
use rustc_span::{SourceFile, Span, Symbol};
|
use rustc_span::{SourceFile, Span, Symbol};
|
||||||
|
@ -79,7 +79,7 @@ fn lint_mixed_attrs(cx: &EarlyContext<'_>, attrs: &[Attribute]) {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn attr_in_same_src_as_item(source_map: &SourceMap, item_src: &Lrc<SourceFile>, attr_span: Span) -> bool {
|
fn attr_in_same_src_as_item(source_map: &SourceMap, item_src: &Arc<SourceFile>, attr_span: Span) -> bool {
|
||||||
let attr_src = source_map.lookup_source_file(attr_span.lo());
|
let attr_src = source_map.lookup_source_file(attr_span.lo());
|
||||||
Lrc::ptr_eq(item_src, &attr_src)
|
Arc::ptr_eq(item_src, &attr_src)
|
||||||
}
|
}
|
||||||
|
|
|
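`Arc::ptr_eq`, like the `Lrc::ptr_eq` it replaces, compares allocations rather than values, which is exactly what the source-file identity check above relies on. A small self-contained illustration:

```rust
use std::sync::Arc;

fn main() {
    let a = Arc::new(String::from("same contents"));
    let b = Arc::clone(&a);                           // another handle to the same allocation
    let c = Arc::new(String::from("same contents"));  // equal value, different allocation

    assert!(Arc::ptr_eq(&a, &b));  // identity: same allocation
    assert!(!Arc::ptr_eq(&a, &c)); // not the same allocation...
    assert_eq!(*a, *c);            // ...even though the contents compare equal
}
```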
@@ -1,10 +1,10 @@
 use std::ops::Range;
 use std::{io, thread};
+use std::sync::Arc;
 
 use crate::doc::{NEEDLESS_DOCTEST_MAIN, TEST_ATTR_IN_DOCTEST};
 use clippy_utils::diagnostics::span_lint;
 use rustc_ast::{CoroutineKind, Fn, FnRetTy, Item, ItemKind};
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::emitter::HumanEmitter;
 use rustc_errors::{Diag, DiagCtxt};
 use rustc_lint::LateContext;
@@ -46,8 +46,8 @@ pub fn check(
         rustc_errors::fallback_fluent_bundle(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), false);
     let emitter = HumanEmitter::new(Box::new(io::sink()), fallback_bundle);
     let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();
-    #[expect(clippy::arc_with_non_send_sync)] // `Lrc` is expected by with_dcx
-    let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+    #[expect(clippy::arc_with_non_send_sync)] // `Arc` is expected by with_dcx
+    let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
     let psess = ParseSess::with_dcx(dcx, sm);
 
     let mut parser = match new_parser_from_source_str(&psess, filename, code) {

@@ -1,3 +1,4 @@
+use std::sync::Arc;
 use std::ops::ControlFlow;
 
 use clippy_config::Conf;
@@ -6,7 +7,6 @@ use clippy_utils::is_lint_allowed;
 use clippy_utils::source::walk_span_to_context;
 use clippy_utils::visitors::{Descend, for_each_expr};
 use hir::HirId;
-use rustc_data_structures::sync::Lrc;
 use rustc_hir as hir;
 use rustc_hir::{Block, BlockCheckMode, ItemKind, Node, UnsafeSource};
 use rustc_lexer::{TokenKind, tokenize};
@@ -480,7 +480,7 @@ fn item_has_safety_comment(cx: &LateContext<'_>, item: &hir::Item<'_>) -> HasSaf
     if let Some(comment_start) = comment_start
         && let Ok(unsafe_line) = source_map.lookup_line(item.span.lo())
         && let Ok(comment_start_line) = source_map.lookup_line(comment_start)
-        && Lrc::ptr_eq(&unsafe_line.sf, &comment_start_line.sf)
+        && Arc::ptr_eq(&unsafe_line.sf, &comment_start_line.sf)
         && let Some(src) = unsafe_line.sf.src.as_deref()
     {
         return if comment_start_line.line >= unsafe_line.line {
@@ -520,7 +520,7 @@ fn stmt_has_safety_comment(cx: &LateContext<'_>, span: Span, hir_id: HirId) -> H
     if let Some(comment_start) = comment_start
         && let Ok(unsafe_line) = source_map.lookup_line(span.lo())
         && let Ok(comment_start_line) = source_map.lookup_line(comment_start)
-        && Lrc::ptr_eq(&unsafe_line.sf, &comment_start_line.sf)
+        && Arc::ptr_eq(&unsafe_line.sf, &comment_start_line.sf)
         && let Some(src) = unsafe_line.sf.src.as_deref()
     {
         return if comment_start_line.line >= unsafe_line.line {
@@ -580,7 +580,7 @@ fn span_from_macro_expansion_has_safety_comment(cx: &LateContext<'_>, span: Span
     // ^--------------------------------------------^
     if let Ok(unsafe_line) = source_map.lookup_line(span.lo())
         && let Ok(macro_line) = source_map.lookup_line(ctxt.outer_expn_data().def_site.lo())
-        && Lrc::ptr_eq(&unsafe_line.sf, &macro_line.sf)
+        && Arc::ptr_eq(&unsafe_line.sf, &macro_line.sf)
         && let Some(src) = unsafe_line.sf.src.as_deref()
     {
         if macro_line.line < unsafe_line.line {
@@ -641,7 +641,7 @@ fn span_has_safety_comment(cx: &LateContext<'_>, span: Span) -> bool {
     if let Ok(unsafe_line) = source_map.lookup_line(span.lo())
         && let Some(body_span) = walk_span_to_context(search_span, SyntaxContext::root())
         && let Ok(body_line) = source_map.lookup_line(body_span.lo())
-        && Lrc::ptr_eq(&unsafe_line.sf, &body_line.sf)
+        && Arc::ptr_eq(&unsafe_line.sf, &body_line.sf)
        && let Some(src) = unsafe_line.sf.src.as_deref()
     {
         // Get the text from the start of function body to the unsafe block.

@@ -1,14 +1,13 @@
 use std::mem;
-use std::sync::OnceLock;
+use std::sync::{Arc, OnceLock};
 
 use rustc_ast::{Attribute, Crate};
-use rustc_data_structures::sync::Lrc;
 use rustc_lint::{EarlyContext, EarlyLintPass};
 use rustc_session::impl_lint_pass;
 use rustc_span::Span;
 
 #[derive(Clone, Default)]
-pub struct AttrStorage(pub Lrc<OnceLock<Vec<Span>>>);
+pub struct AttrStorage(pub Arc<OnceLock<Vec<Span>>>);
 
 pub struct AttrCollector {
     storage: AttrStorage,

@@ -4,13 +4,14 @@
 //! executable MIR bodies, so we have to do this instead.
 #![allow(clippy::float_cmp)]
 
+use std::sync::Arc;
+
 use crate::source::{SpanRangeExt, walk_span_to_context};
 use crate::{clip, is_direct_expn_of, sext, unsext};
 
 use rustc_apfloat::Float;
 use rustc_apfloat::ieee::{Half, Quad};
 use rustc_ast::ast::{self, LitFloatType, LitKind};
-use rustc_data_structures::sync::Lrc;
 use rustc_hir::def::{DefKind, Res};
 use rustc_hir::{
     BinOp, BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, PatExpr, PatExprKind, QPath, UnOp,
@@ -37,7 +38,7 @@ pub enum Constant<'tcx> {
     /// A `String` (e.g., "abc").
     Str(String),
     /// A binary string (e.g., `b"abc"`).
-    Binary(Lrc<[u8]>),
+    Binary(Arc<[u8]>),
     /// A single `char` (e.g., `'a'`).
     Char(char),
     /// An integer's bit representation.
@@ -305,7 +306,7 @@ pub fn lit_to_mir_constant<'tcx>(lit: &LitKind, ty: Option<Ty<'tcx>>) -> Constan
     match *lit {
         LitKind::Str(ref is, _) => Constant::Str(is.to_string()),
         LitKind::Byte(b) => Constant::Int(u128::from(b)),
-        LitKind::ByteStr(ref s, _) | LitKind::CStr(ref s, _) => Constant::Binary(Lrc::clone(s)),
+        LitKind::ByteStr(ref s, _) | LitKind::CStr(ref s, _) => Constant::Binary(Arc::clone(s)),
         LitKind::Char(c) => Constant::Char(c),
         LitKind::Int(n, _) => Constant::Int(n.get()),
         LitKind::Float(ref is, LitFloatType::Suffixed(fty)) => match fty {

@@ -1,12 +1,14 @@
 #![allow(clippy::similar_names)] // `expr` and `expn`
 
+use std::sync::Arc;
+
 use crate::get_unique_attr;
 use crate::visitors::{Descend, for_each_expr_without_closures};
 
 use arrayvec::ArrayVec;
 use rustc_ast::{FormatArgs, FormatArgument, FormatPlaceholder};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::{Lrc, OnceLock};
+use rustc_data_structures::sync::OnceLock;
 use rustc_hir::{self as hir, Expr, ExprKind, HirId, Node, QPath};
 use rustc_lint::{LateContext, LintContext};
 use rustc_span::def_id::DefId;
@@ -393,7 +395,7 @@ fn is_assert_arg(cx: &LateContext<'_>, expr: &Expr<'_>, assert_expn: ExpnId) ->
 /// Stores AST [`FormatArgs`] nodes for use in late lint passes, as they are in a desugared form in
 /// the HIR
 #[derive(Default, Clone)]
-pub struct FormatArgsStorage(Lrc<OnceLock<FxHashMap<Span, FormatArgs>>>);
+pub struct FormatArgsStorage(Arc<OnceLock<FxHashMap<Span, FormatArgs>>>);
 
 impl FormatArgsStorage {
     /// Returns an AST [`FormatArgs`] node if a `format_args` expansion is found as a descendant of
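`FormatArgsStorage` keeps the `Arc<OnceLock<...>>` shape: handles are cheap to clone, the map is filled in exactly once, and every later reader sees the same data. A minimal sketch of that write-once sharing pattern with plain standard-library types (`HashMap` standing in for `FxHashMap`):

```rust
use std::collections::HashMap;
use std::sync::{Arc, OnceLock};

// Cheap-to-clone handle over data that is written once and then only read.
#[derive(Default, Clone)]
struct Storage(Arc<OnceLock<HashMap<u32, String>>>);

impl Storage {
    fn set(&self, map: HashMap<u32, String>) {
        // A second `set` is ignored; `OnceLock` keeps the first value.
        let _ = self.0.set(map);
    }

    fn get(&self, key: u32) -> Option<&String> {
        self.0.get()?.get(&key)
    }
}

fn main() {
    let storage = Storage::default();
    let handle = storage.clone(); // clones the Arc, not the map

    storage.set(HashMap::from([(1, String::from("format_args!(..)"))]));
    assert_eq!(handle.get(1).map(String::as_str), Some("format_args!(..)"));
}
```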
@@ -2,8 +2,9 @@
 
 #![allow(clippy::module_name_repetitions)]
 
+use std::sync::Arc;
+
 use rustc_ast::{LitKind, StrStyle};
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::Applicability;
 use rustc_hir::{BlockCheckMode, Expr, ExprKind, UnsafeSource};
 use rustc_lint::{EarlyContext, LateContext};
@@ -204,7 +205,7 @@ impl fmt::Display for SourceText {
 fn get_source_range(sm: &SourceMap, sp: Range<BytePos>) -> Option<SourceFileRange> {
     let start = sm.lookup_byte_offset(sp.start);
     let end = sm.lookup_byte_offset(sp.end);
-    if !Lrc::ptr_eq(&start.sf, &end.sf) || start.pos > end.pos {
+    if !Arc::ptr_eq(&start.sf, &end.sf) || start.pos > end.pos {
         return None;
     }
     sm.ensure_source_file_source_present(&start.sf);
@@ -277,7 +278,7 @@ fn trim_start(sm: &SourceMap, sp: Range<BytePos>) -> Range<BytePos> {
 }
 
 pub struct SourceFileRange {
-    pub sf: Lrc<SourceFile>,
+    pub sf: Arc<SourceFile>,
     pub range: Range<usize>,
 }
 impl SourceFileRange {

@@ -29,7 +29,7 @@ use std::num::NonZero;
 use std::ops::Range;
 use std::path::PathBuf;
 use std::str::FromStr;
-use std::sync::Once;
+use std::sync::{Arc, Once};
 use std::sync::atomic::{AtomicI32, AtomicU32, Ordering};
 
 use miri::{
@@ -38,7 +38,6 @@ use miri::{
 };
 use rustc_abi::ExternAbi;
 use rustc_data_structures::sync;
-use rustc_data_structures::sync::Lrc;
 use rustc_driver::Compilation;
 use rustc_hir::def_id::LOCAL_CRATE;
 use rustc_hir::{self as hir, Node};
@@ -134,7 +133,7 @@ impl rustc_driver::Callbacks for MiriCompilerCalls {
                 // HACK: rustc will emit "crate ... required to be available in rlib format, but
                 // was not found in this form" errors once we use `tcx.dependency_formats()` if
                 // there's no rlib provided, so setting a dummy path here to workaround those errors.
-                Lrc::make_mut(&mut crate_source).rlib = Some((PathBuf::new(), PathKind::All));
+                Arc::make_mut(&mut crate_source).rlib = Some((PathBuf::new(), PathKind::All));
                 crate_source
             };
         });
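`Arc::make_mut` behaves like the `Lrc::make_mut` it replaces: it hands out `&mut` access, cloning the underlying value first if the allocation is shared, so any other handles keep seeing the old data. A short illustration of that clone-on-write behaviour:

```rust
use std::sync::Arc;

fn main() {
    let original = Arc::new(vec![1, 2, 3]);
    let mut shared = Arc::clone(&original);

    // The allocation is shared, so `make_mut` clones the vector first
    // (clone-on-write) and then hands out `&mut` to the private copy.
    Arc::make_mut(&mut shared).push(4);

    assert_eq!(*original, vec![1, 2, 3]);  // untouched
    assert_eq!(*shared, vec![1, 2, 3, 4]); // modified copy
}
```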
@@ -3,9 +3,9 @@
 use itertools::Itertools;
 use std::collections::HashMap;
 use std::path::PathBuf;
+use std::sync::Arc;
 use std::{cmp, fmt, iter, str};
 
-use rustc_data_structures::sync::Lrc;
 use rustc_span::SourceFile;
 use serde::{Deserialize, Deserializer, Serialize, Serializer, ser};
 use serde_json as json;
@@ -13,7 +13,7 @@ use thiserror::Error;
 
 /// A range of lines in a file, inclusive of both ends.
 pub struct LineRange {
-    pub(crate) file: Lrc<SourceFile>,
+    pub(crate) file: Arc<SourceFile>,
     pub(crate) lo: usize,
     pub(crate) hi: usize,
 }

@@ -1,7 +1,8 @@
 use std::path::Path;
+use std::sync::Arc;
 use std::sync::atomic::{AtomicBool, Ordering};
 
-use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
+use rustc_data_structures::sync::IntoDynSyncSend;
 use rustc_errors::emitter::{DynEmitter, Emitter, HumanEmitter, SilentEmitter, stderr_destination};
 use rustc_errors::registry::Registry;
 use rustc_errors::translation::Translate;
@@ -25,17 +26,17 @@ use crate::{Config, ErrorKind, FileName};
 /// ParseSess holds structs necessary for constructing a parser.
 pub(crate) struct ParseSess {
     raw_psess: RawParseSess,
-    ignore_path_set: Lrc<IgnorePathSet>,
-    can_reset_errors: Lrc<AtomicBool>,
+    ignore_path_set: Arc<IgnorePathSet>,
+    can_reset_errors: Arc<AtomicBool>,
 }
 
 /// Emit errors against every files expect ones specified in the `ignore_path_set`.
 struct SilentOnIgnoredFilesEmitter {
-    ignore_path_set: IntoDynSyncSend<Lrc<IgnorePathSet>>,
-    source_map: Lrc<SourceMap>,
+    ignore_path_set: IntoDynSyncSend<Arc<IgnorePathSet>>,
+    source_map: Arc<SourceMap>,
     emitter: Box<DynEmitter>,
     has_non_ignorable_parser_errors: bool,
-    can_reset: Lrc<AtomicBool>,
+    can_reset: Arc<AtomicBool>,
 }
 
 impl SilentOnIgnoredFilesEmitter {
@@ -96,9 +97,9 @@ impl From<Color> for ColorConfig {
 }
 
 fn default_dcx(
-    source_map: Lrc<SourceMap>,
-    ignore_path_set: Lrc<IgnorePathSet>,
-    can_reset: Lrc<AtomicBool>,
+    source_map: Arc<SourceMap>,
+    ignore_path_set: Arc<IgnorePathSet>,
+    can_reset: Arc<AtomicBool>,
     show_parse_errors: bool,
     color: Color,
 ) -> DiagCtxt {
@@ -139,16 +140,16 @@ fn default_dcx(
 impl ParseSess {
     pub(crate) fn new(config: &Config) -> Result<ParseSess, ErrorKind> {
         let ignore_path_set = match IgnorePathSet::from_ignore_list(&config.ignore()) {
-            Ok(ignore_path_set) => Lrc::new(ignore_path_set),
+            Ok(ignore_path_set) => Arc::new(ignore_path_set),
             Err(e) => return Err(ErrorKind::InvalidGlobPattern(e)),
         };
-        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-        let can_reset_errors = Lrc::new(AtomicBool::new(false));
+        let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
+        let can_reset_errors = Arc::new(AtomicBool::new(false));
 
         let dcx = default_dcx(
-            Lrc::clone(&source_map),
-            Lrc::clone(&ignore_path_set),
-            Lrc::clone(&can_reset_errors),
+            Arc::clone(&source_map),
+            Arc::clone(&ignore_path_set),
+            Arc::clone(&can_reset_errors),
             config.show_parse_errors(),
            config.color(),
         );
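The call sites above keep the fully qualified `Arc::clone(&x)` form rather than `x.clone()`; both are equivalent, but spelling out `Arc::clone` makes it obvious that only the reference count is bumped and the underlying `SourceMap` or ignore set is not copied. For example:

```rust
use std::sync::Arc;

fn main() {
    let source_map = Arc::new(String::from("contents of a source file"));

    // Same effect as `source_map.clone()`, but the explicit form signals that
    // only the reference count changes; the data itself is not duplicated.
    let handle = Arc::clone(&source_map);

    assert_eq!(Arc::strong_count(&source_map), 2);
    drop(handle);
    assert_eq!(Arc::strong_count(&source_map), 1);
}
```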
@@ -211,7 +212,7 @@ impl ParseSess {
         self.raw_psess.source_map().span_to_filename(span).into()
     }
 
-    pub(crate) fn span_to_file_contents(&self, span: Span) -> Lrc<rustc_span::SourceFile> {
+    pub(crate) fn span_to_file_contents(&self, span: Span) -> Arc<rustc_span::SourceFile> {
         self.raw_psess
             .source_map()
             .lookup_source_file(span.data().lo)
@@ -255,11 +256,11 @@ impl ParseSess {
         SnippetProvider::new(
             source_file.start_pos,
             source_file.end_position(),
-            Lrc::clone(source_file.src.as_ref().unwrap()),
+            Arc::clone(source_file.src.as_ref().unwrap()),
         )
     }
 
-    pub(crate) fn get_original_snippet(&self, file_name: &FileName) -> Option<Lrc<String>> {
+    pub(crate) fn get_original_snippet(&self, file_name: &FileName) -> Option<Arc<String>> {
         self.raw_psess
             .source_map()
             .get_source_file(&file_name.into())
@@ -331,7 +332,7 @@ mod tests {
     use std::sync::atomic::AtomicU32;
 
     struct TestEmitter {
-        num_emitted_errors: Lrc<AtomicU32>,
+        num_emitted_errors: Arc<AtomicU32>,
     }
 
     impl Translate for TestEmitter {
@@ -365,15 +366,15 @@ mod tests {
     }
 
     fn build_emitter(
-        num_emitted_errors: Lrc<AtomicU32>,
-        can_reset: Lrc<AtomicBool>,
-        source_map: Option<Lrc<SourceMap>>,
+        num_emitted_errors: Arc<AtomicU32>,
+        can_reset: Arc<AtomicBool>,
+        source_map: Option<Arc<SourceMap>>,
         ignore_list: Option<IgnoreList>,
     ) -> SilentOnIgnoredFilesEmitter {
         let emitter_writer = TestEmitter { num_emitted_errors };
         let source_map =
-            source_map.unwrap_or_else(|| Lrc::new(SourceMap::new(FilePathMapping::empty())));
-        let ignore_path_set = Lrc::new(
+            source_map.unwrap_or_else(|| Arc::new(SourceMap::new(FilePathMapping::empty())));
+        let ignore_path_set = Arc::new(
             IgnorePathSet::from_ignore_list(&ignore_list.unwrap_or_default()).unwrap(),
         );
         SilentOnIgnoredFilesEmitter {
@@ -393,10 +394,10 @@ mod tests {
 
     #[test]
     fn handles_fatal_parse_error_in_ignored_file() {
-        let num_emitted_errors = Lrc::new(AtomicU32::new(0));
-        let can_reset_errors = Lrc::new(AtomicBool::new(false));
+        let num_emitted_errors = Arc::new(AtomicU32::new(0));
+        let can_reset_errors = Arc::new(AtomicBool::new(false));
         let ignore_list = get_ignore_list(r#"ignore = ["foo.rs"]"#);
-        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
         let source =
             String::from(r#"extern "system" fn jni_symbol!( funcName ) ( ... ) -> {} "#);
         source_map.new_source_file(
@@ -405,9 +406,9 @@ mod tests {
         );
         let registry = Registry::new(&[]);
         let mut emitter = build_emitter(
-            Lrc::clone(&num_emitted_errors),
-            Lrc::clone(&can_reset_errors),
-            Some(Lrc::clone(&source_map)),
+            Arc::clone(&num_emitted_errors),
+            Arc::clone(&can_reset_errors),
+            Some(Arc::clone(&source_map)),
             Some(ignore_list),
         );
         let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
@@ -420,10 +421,10 @@ mod tests {
     #[nightly_only_test]
     #[test]
     fn handles_recoverable_parse_error_in_ignored_file() {
-        let num_emitted_errors = Lrc::new(AtomicU32::new(0));
-        let can_reset_errors = Lrc::new(AtomicBool::new(false));
+        let num_emitted_errors = Arc::new(AtomicU32::new(0));
+        let can_reset_errors = Arc::new(AtomicBool::new(false));
         let ignore_list = get_ignore_list(r#"ignore = ["foo.rs"]"#);
-        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
         let source = String::from(r#"pub fn bar() { 1x; }"#);
         source_map.new_source_file(
             SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
@@ -431,9 +432,9 @@ mod tests {
         );
         let registry = Registry::new(&[]);
         let mut emitter = build_emitter(
-            Lrc::clone(&num_emitted_errors),
-            Lrc::clone(&can_reset_errors),
-            Some(Lrc::clone(&source_map)),
+            Arc::clone(&num_emitted_errors),
+            Arc::clone(&can_reset_errors),
+            Some(Arc::clone(&source_map)),
             Some(ignore_list),
         );
         let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
@@ -446,9 +447,9 @@ mod tests {
     #[nightly_only_test]
     #[test]
     fn handles_recoverable_parse_error_in_non_ignored_file() {
-        let num_emitted_errors = Lrc::new(AtomicU32::new(0));
-        let can_reset_errors = Lrc::new(AtomicBool::new(false));
-        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let num_emitted_errors = Arc::new(AtomicU32::new(0));
+        let can_reset_errors = Arc::new(AtomicBool::new(false));
+        let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
         let source = String::from(r#"pub fn bar() { 1x; }"#);
         source_map.new_source_file(
             SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
@@ -456,9 +457,9 @@ mod tests {
         );
         let registry = Registry::new(&[]);
         let mut emitter = build_emitter(
-            Lrc::clone(&num_emitted_errors),
-            Lrc::clone(&can_reset_errors),
-            Some(Lrc::clone(&source_map)),
+            Arc::clone(&num_emitted_errors),
+            Arc::clone(&can_reset_errors),
+            Some(Arc::clone(&source_map)),
             None,
         );
         let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
@@ -471,9 +472,9 @@ mod tests {
     #[nightly_only_test]
     #[test]
     fn handles_mix_of_recoverable_parse_error() {
-        let num_emitted_errors = Lrc::new(AtomicU32::new(0));
-        let can_reset_errors = Lrc::new(AtomicBool::new(false));
-        let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let num_emitted_errors = Arc::new(AtomicU32::new(0));
+        let can_reset_errors = Arc::new(AtomicBool::new(false));
+        let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
         let ignore_list = get_ignore_list(r#"ignore = ["foo.rs"]"#);
         let bar_source = String::from(r#"pub fn bar() { 1x; }"#);
         let foo_source = String::from(r#"pub fn foo() { 1x; }"#);
@@ -493,9 +494,9 @@ mod tests {
         );
         let registry = Registry::new(&[]);
         let mut emitter = build_emitter(
-            Lrc::clone(&num_emitted_errors),
-            Lrc::clone(&can_reset_errors),
-            Some(Lrc::clone(&source_map)),
+            Arc::clone(&num_emitted_errors),
+            Arc::clone(&can_reset_errors),
+            Some(Arc::clone(&source_map)),
             Some(ignore_list),
         );
         let bar_span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));

@@ -1,6 +1,7 @@
 use std::fs;
 use std::io::{self, Write};
 use std::path::Path;
+use std::sync::Arc;
 
 use crate::NewlineStyle;
 use crate::config::FileName;
@@ -14,8 +15,6 @@ use crate::create_emitter;
 #[cfg(test)]
 use crate::formatting::FileRecord;
 
-use rustc_data_structures::sync::Lrc;
-
 // Append a newline to the end of each file.
 pub(crate) fn append_newline(s: &mut String) {
     s.push('\n');
@@ -88,11 +87,11 @@ where
     // source map instead of hitting the file system. This also supports getting
     // original text for `FileName::Stdin`.
     let original_text = if newline_style != NewlineStyle::Auto && *filename != FileName::Stdin {
-        Lrc::new(fs::read_to_string(ensure_real_path(filename))?)
+        Arc::new(fs::read_to_string(ensure_real_path(filename))?)
     } else {
         match psess.and_then(|psess| psess.get_original_snippet(filename)) {
             Some(ori) => ori,
-            None => Lrc::new(fs::read_to_string(ensure_real_path(filename))?),
+            None => Arc::new(fs::read_to_string(ensure_real_path(filename))?),
         }
     };
 

@@ -1,8 +1,8 @@
 use std::cell::{Cell, RefCell};
 use std::rc::Rc;
+use std::sync::Arc;
 
 use rustc_ast::{ast, token::Delimiter, visit};
-use rustc_data_structures::sync::Lrc;
 use rustc_span::{BytePos, Pos, Span, symbol};
 use tracing::debug;
 
@@ -32,7 +32,7 @@ use crate::{ErrorKind, FormatReport, FormattingError};
 /// Creates a string slice corresponding to the specified span.
 pub(crate) struct SnippetProvider {
     /// A pointer to the content of the file we are formatting.
-    big_snippet: Lrc<String>,
+    big_snippet: Arc<String>,
     /// A position of the start of `big_snippet`, used as an offset.
     start_pos: usize,
     /// An end position of the file that this snippet lives.
@@ -46,7 +46,7 @@ impl SnippetProvider {
         Some(&self.big_snippet[start_index..end_index])
     }
 
-    pub(crate) fn new(start_pos: BytePos, end_pos: BytePos, big_snippet: Lrc<String>) -> Self {
+    pub(crate) fn new(start_pos: BytePos, end_pos: BytePos, big_snippet: Arc<String>) -> Self {
         let start_pos = start_pos.to_usize();
         let end_pos = end_pos.to_usize();
         SnippetProvider {