Auto merge of #136471 - safinaskar:parallel, r=SparrowLii
tree-wide: parallel: Fully removed all `Lrc`, replaced with `Arc`

This is a continuation of https://github.com/rust-lang/rust/pull/132282. I'm pretty sure I did everything right. In particular, I searched all occurrences of `Lrc` in submodules and made sure that they don't need replacement.

There are other possibilities, though. We could define `enum Lrc<T> { Rc(Rc<T>), Arc(Arc<T>) }`. Or we could make `Lrc` a union and have every clone read from a special thread-local variable. Or we could add a generic parameter to `Lrc`, and, yes, that parameter would then show up everywhere across the codebase. So, if you think we should take one of these alternative approaches, don't merge this PR. But if it is decided to stick with `Arc`, then please merge.

cc "Parallel Rustc Front-end" (https://github.com/rust-lang/rust/issues/113349)

r? SparrowLii

`@rustbot` label WG-compiler-parallel
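For reference, a minimal sketch of the "wrapper enum" alternative mentioned above (purely illustrative, not part of this PR; the `Clone`/`Deref` impls are the obvious ones such a type would need):

```rust
use std::ops::Deref;
use std::rc::Rc;
use std::sync::Arc;

/// Hypothetical runtime-switchable pointer, as an alternative to replacing
/// `Lrc` with `Arc` outright. Every operation has to branch on the variant,
/// and the `Rc` variant keeps the whole type `!Send + !Sync`.
enum Lrc<T> {
    Rc(Rc<T>),
    Arc(Arc<T>),
}

impl<T> Clone for Lrc<T> {
    fn clone(&self) -> Self {
        match self {
            Lrc::Rc(rc) => Lrc::Rc(Rc::clone(rc)),
            Lrc::Arc(arc) => Lrc::Arc(Arc::clone(arc)),
        }
    }
}

impl<T> Deref for Lrc<T> {
    type Target = T;
    fn deref(&self) -> &T {
        match self {
            Lrc::Rc(rc) => &**rc,
            Lrc::Arc(arc) => &**arc,
        }
    }
}
```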
Commit 2f92f050e8: 77 changed files with 405 additions and 395 deletions
@@ -19,6 +19,7 @@
 //! - [`UnOp`], [`BinOp`], and [`BinOpKind`]: Unary and binary operators.
 
 use std::borrow::Cow;
+use std::sync::Arc;
 use std::{cmp, fmt};
 
 pub use GenericArgs::*;
@@ -27,7 +28,6 @@ pub use rustc_ast_ir::{Movability, Mutability, Pinnedness};
 use rustc_data_structures::packed::Pu128;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_data_structures::sync::Lrc;
 use rustc_data_structures::tagged_ptr::Tag;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 pub use rustc_span::AttrId;
@@ -1611,7 +1611,7 @@ pub enum ExprKind {
     /// Added for optimization purposes to avoid the need to escape
     /// large binary blobs - should always behave like [`ExprKind::Lit`]
     /// with a `ByteStr` literal.
-    IncludedBytes(Lrc<[u8]>),
+    IncludedBytes(Arc<[u8]>),
 
     /// A `format_args!()` expression.
     FormatArgs(P<FormatArgs>),
@@ -1904,9 +1904,9 @@ pub enum LitKind {
     Str(Symbol, StrStyle),
     /// A byte string (`b"foo"`). Not stored as a symbol because it might be
     /// non-utf8, and symbols only allow utf8 strings.
-    ByteStr(Lrc<[u8]>, StrStyle),
+    ByteStr(Arc<[u8]>, StrStyle),
     /// A C String (`c"foo"`). Guaranteed to only have `\0` at the end.
-    CStr(Lrc<[u8]>, StrStyle),
+    CStr(Arc<[u8]>, StrStyle),
     /// A byte char (`b'f'`).
     Byte(u8),
     /// A character literal (`'a'`).
@@ -9,10 +9,10 @@
 
 use std::ops::DerefMut;
 use std::panic;
+use std::sync::Arc;
 
 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
 use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_data_structures::sync::Lrc;
 use rustc_span::source_map::Spanned;
 use rustc_span::{Ident, Span};
 use smallvec::{Array, SmallVec, smallvec};
@@ -793,14 +793,14 @@ fn visit_tt<T: MutVisitor>(vis: &mut T, tt: &mut TokenTree) {
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 fn visit_tts<T: MutVisitor>(vis: &mut T, TokenStream(tts): &mut TokenStream) {
     if T::VISIT_TOKENS && !tts.is_empty() {
-        let tts = Lrc::make_mut(tts);
+        let tts = Arc::make_mut(tts);
         visit_vec(tts, |tree| visit_tt(vis, tree));
     }
 }
 
 fn visit_attr_tts<T: MutVisitor>(vis: &mut T, AttrTokenStream(tts): &mut AttrTokenStream) {
     if T::VISIT_TOKENS && !tts.is_empty() {
-        let tts = Lrc::make_mut(tts);
+        let tts = Arc::make_mut(tts);
         visit_vec(tts, |tree| visit_attr_tt(vis, tree));
     }
 }
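For readers unfamiliar with the `make_mut` calls above (and in the `TokenStream` methods later in this diff): `Arc::make_mut` is clone-on-write, mutating in place when the `Arc` is uniquely owned and cloning the inner value first when it is shared. A standalone sketch of that behavior:

```rust
use std::sync::Arc;

fn main() {
    let mut a: Arc<Vec<i32>> = Arc::new(vec![1, 2, 3]);

    // Unique owner: `make_mut` hands back a mutable reference to the
    // existing allocation; nothing is copied.
    Arc::make_mut(&mut a).push(4);

    // Shared: `make_mut` clones the vector first so the other handle is
    // left untouched, then mutates the fresh copy.
    let b = Arc::clone(&a);
    Arc::make_mut(&mut a).push(5);

    assert_eq!(*a, vec![1, 2, 3, 4, 5]);
    assert_eq!(*b, vec![1, 2, 3, 4]);
}
```

`Rc::make_mut` has the same contract, and `Lrc` is at this point just an alias for `Arc`, so these hunks are a pure rename at the call sites; only the reference counting stays atomic.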
@@ -840,7 +840,7 @@ pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
             vis.visit_ident(ident);
         }
         token::Interpolated(nt) => {
-            let nt = Lrc::make_mut(nt);
+            let nt = Arc::make_mut(nt);
             visit_nonterminal(vis, nt);
         }
         _ => {}
@@ -1,5 +1,6 @@
 use std::borrow::Cow;
 use std::fmt;
+use std::sync::Arc;
 
 pub use BinOpToken::*;
 pub use LitKind::*;
@@ -8,7 +9,6 @@ pub use NtExprKind::*;
 pub use NtPatKind::*;
 pub use TokenKind::*;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::Lrc;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::edition::Edition;
 use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
@@ -451,7 +451,7 @@ pub enum TokenKind {
     /// The span in the surrounding `Token` is that of the metavariable in the
     /// macro's RHS. The span within the Nonterminal is that of the fragment
     /// passed to the macro at the call site.
-    Interpolated(Lrc<Nonterminal>),
+    Interpolated(Arc<Nonterminal>),
 
     /// A doc comment token.
     /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
@@ -469,7 +469,7 @@ impl Clone for TokenKind {
         // a copy. This is faster than the `derive(Clone)` version which has a
         // separate path for every variant.
         match self {
-            Interpolated(nt) => Interpolated(Lrc::clone(nt)),
+            Interpolated(nt) => Interpolated(Arc::clone(nt)),
             _ => unsafe { std::ptr::read(self) },
         }
     }
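The manual `Clone` impl above exists because only the `Interpolated` variant holds reference-counted data; every other variant can be bit-copied. A self-contained sketch of the same trick, with hypothetical token variants rather than rustc's actual types:

```rust
use std::sync::Arc;

// Hypothetical stand-in for `TokenKind`: only one variant owns anything.
enum Tok {
    Plus,
    Lit(u32),
    Interpolated(Arc<String>),
}

impl Clone for Tok {
    fn clone(&self) -> Self {
        match self {
            // The `Arc` must be cloned for real so its reference count is
            // incremented; a bitwise copy would later lead to a double free.
            Tok::Interpolated(nt) => Tok::Interpolated(Arc::clone(nt)),
            // All remaining variants hold only plain data with no drop glue,
            // so a bitwise copy of `self` is sound and avoids a separate
            // code path per variant.
            _ => unsafe { std::ptr::read(self) },
        }
    }
}
```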
@@ -14,10 +14,11 @@
 //! ownership of the original.
 
 use std::borrow::Cow;
+use std::sync::Arc;
 use std::{cmp, fmt, iter};
 
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::{self, Lrc};
+use rustc_data_structures::sync;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
@@ -119,11 +120,11 @@ impl ToAttrTokenStream for AttrTokenStream {
 /// of an actual `TokenStream` until it is needed.
 /// `Box` is here only to reduce the structure size.
 #[derive(Clone)]
-pub struct LazyAttrTokenStream(Lrc<Box<dyn ToAttrTokenStream>>);
+pub struct LazyAttrTokenStream(Arc<Box<dyn ToAttrTokenStream>>);
 
 impl LazyAttrTokenStream {
     pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
-        LazyAttrTokenStream(Lrc::new(Box::new(inner)))
+        LazyAttrTokenStream(Arc::new(Box::new(inner)))
     }
 
     pub fn to_attr_token_stream(&self) -> AttrTokenStream {
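The doc comment's note that the inner `Box` is there "only to reduce the structure size" is about pointer width: `Arc<dyn Trait>` is a fat pointer (data pointer plus vtable pointer), while boxing the trait object first keeps the handle itself one word. A quick check, using an empty stand-in trait rather than the real one:

```rust
use std::mem::size_of;
use std::sync::Arc;

// Empty stand-in for the real `ToAttrTokenStream` trait.
trait ToAttrTokenStream {}

fn main() {
    // Fat pointer: data pointer + vtable pointer.
    assert_eq!(size_of::<Arc<dyn ToAttrTokenStream>>(), 2 * size_of::<usize>());
    // Boxing the trait object first makes the `Arc` itself a thin pointer.
    assert_eq!(size_of::<Arc<Box<dyn ToAttrTokenStream>>>(), size_of::<usize>());
}
```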
@@ -160,7 +161,7 @@ impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
 /// during expansion to perform early cfg-expansion, and to process attributes
 /// during proc-macro invocations.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);
+pub struct AttrTokenStream(pub Arc<Vec<AttrTokenTree>>);
 
 /// Like `TokenTree`, but for `AttrTokenStream`.
 #[derive(Clone, Debug, Encodable, Decodable)]
@@ -175,7 +176,7 @@ pub enum AttrTokenTree {
 
 impl AttrTokenStream {
     pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
-        AttrTokenStream(Lrc::new(tokens))
+        AttrTokenStream(Arc::new(tokens))
     }
 
     /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`. During
@@ -293,7 +294,7 @@ pub struct AttrsTarget {
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
 /// backwards compatibility.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
+pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
 
 /// Indicates whether a token can join with the following token to form a
 /// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
@@ -412,7 +413,7 @@ impl PartialEq<TokenStream> for TokenStream {
 
 impl TokenStream {
     pub fn new(tts: Vec<TokenTree>) -> TokenStream {
-        TokenStream(Lrc::new(tts))
+        TokenStream(Arc::new(tts))
     }
 
     pub fn is_empty(&self) -> bool {
@@ -544,7 +545,7 @@ impl TokenStream {
     /// Push `tt` onto the end of the stream, possibly gluing it to the last
     /// token. Uses `make_mut` to maximize efficiency.
     pub fn push_tree(&mut self, tt: TokenTree) {
-        let vec_mut = Lrc::make_mut(&mut self.0);
+        let vec_mut = Arc::make_mut(&mut self.0);
 
         if Self::try_glue_to_last(vec_mut, &tt) {
             // nothing else to do
@@ -557,7 +558,7 @@ impl TokenStream {
     /// token tree to the last token. (No other token trees will be glued.)
     /// Uses `make_mut` to maximize efficiency.
    pub fn push_stream(&mut self, stream: TokenStream) {
-        let vec_mut = Lrc::make_mut(&mut self.0);
+        let vec_mut = Arc::make_mut(&mut self.0);
 
         let stream_iter = stream.0.iter().cloned();
 
@@ -577,7 +578,7 @@ impl TokenStream {
     }
 
     /// Desugar doc comments like `/// foo` in the stream into `#[doc =
-    /// r"foo"]`. Modifies the `TokenStream` via `Lrc::make_mut`, but as little
+    /// r"foo"]`. Modifies the `TokenStream` via `Arc::make_mut`, but as little
     /// as possible.
     pub fn desugar_doc_comments(&mut self) {
         if let Some(desugared_stream) = desugar_inner(self.clone()) {
@@ -596,7 +597,7 @@ impl TokenStream {
                 ) => {
                     let desugared = desugared_tts(attr_style, data, span);
                     let desugared_len = desugared.len();
-                    Lrc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
+                    Arc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
                     modified = true;
                     i += desugared_len;
                 }
@@ -607,7 +608,7 @@ impl TokenStream {
                 if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
                     let new_tt =
                         TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
-                    Lrc::make_mut(&mut stream.0)[i] = new_tt;
+                    Arc::make_mut(&mut stream.0)[i] = new_tt;
                     modified = true;
                 }
                 i += 1;
@@ -121,7 +121,7 @@ impl LitKind {
             }
             token::ByteStrRaw(n) => {
                 // Raw strings have no escapes so we can convert the symbol
-                // directly to a `Lrc<u8>`.
+                // directly to a `Arc<u8>`.
                 let buf = symbol.as_str().to_owned().into_bytes();
                 LitKind::ByteStr(buf.into(), StrStyle::Raw(n))
             }
@@ -142,7 +142,7 @@ impl LitKind {
             }
             token::CStrRaw(n) => {
                 // Raw strings have no escapes so we can convert the symbol
-                // directly to a `Lrc<u8>` after appending the terminating NUL
+                // directly to a `Arc<u8>` after appending the terminating NUL
                 // char.
                 let mut buf = symbol.as_str().to_owned().into_bytes();
                 buf.push(0);
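The `buf.into()` in the two raw-string arms above relies on the standard `From<Vec<T>> for Arc<[T]>` conversion, which copies the bytes into a single reference-counted allocation matching the new `ByteStr(Arc<[u8]>, StrStyle)` field type. A minimal sketch:

```rust
use std::sync::Arc;

fn main() {
    // Stand-in for `symbol.as_str().to_owned().into_bytes()` above.
    let buf: Vec<u8> = b"raw byte string".to_vec();

    // `Vec<u8>` -> `Arc<[u8]>` via the standard `From` impl; this is what
    // `buf.into()` resolves to once `ByteStr` holds an `Arc<[u8]>`.
    let bytes: Arc<[u8]> = buf.into();
    assert_eq!(&bytes[..], &b"raw byte string"[..]);

    // Further clones of the literal only bump the reference count.
    let shared = Arc::clone(&bytes);
    assert_eq!(Arc::strong_count(&shared), 2);
}
```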