Auto merge of #136471 - safinaskar:parallel, r=<try>
tree-wide: parallel: Fully removed all `Lrc`, replaced with `Arc`

This is a continuation of #132282 .

I'm pretty sure I did everything right. In particular, I searched for all occurrences of `Lrc` in submodules and made sure that none of them need replacement.
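
For context, my working assumption (not something this diff shows, just how I read `rustc_data_structures::sync` after #132282) is that `Lrc` is by now nothing more than a re-export of `Arc`, so the substitution is purely mechanical:

```rust
// Assumed current shape of the alias (illustrative sketch, not copied from the tree):
// compiler/rustc_data_structures/src/sync.rs
pub use std::sync::Arc as Lrc;

// With that alias, every `Lrc::new` / `Lrc::clone` / `Lrc::make_mut` call already
// resolves to the corresponding `Arc` associated function, so s/Lrc/Arc/ can only
// change spelling, not behaviour.
fn _demo() {
    let a: Lrc<str> = Lrc::from("shared");
    let b = Lrc::clone(&a);
    assert_eq!(std::sync::Arc::strong_count(&b), 2);
}
```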

There are other possibilities, though.

We can define `enum Lrc<T> { Rc(Rc<T>), Arc(Arc<T>) }`. Or we can make `Lrc` a union and, on every clone, read from a special thread-local variable. Or we can add a generic parameter to `Lrc`, and, yes, that parameter would then show up everywhere across the whole codebase.
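
For concreteness, here is roughly what the `enum` variant would look like (a sketch I'm writing out only to illustrate the trade-off, not code from this PR); every clone and deref has to branch on the discriminant:

```rust
use std::ops::Deref;
use std::rc::Rc;
use std::sync::Arc;

// Sketch of the first alternative: keep `Lrc` as a type that can hold either
// pointer, at the cost of a tag check on every clone/deref.
enum Lrc<T: ?Sized> {
    Rc(Rc<T>),
    Arc(Arc<T>),
}

impl<T: ?Sized> Clone for Lrc<T> {
    fn clone(&self) -> Self {
        match self {
            Lrc::Rc(rc) => Lrc::Rc(Rc::clone(rc)),
            Lrc::Arc(arc) => Lrc::Arc(Arc::clone(arc)),
        }
    }
}

impl<T: ?Sized> Deref for Lrc<T> {
    type Target = T;
    fn deref(&self) -> &T {
        match self {
            Lrc::Rc(rc) => &**rc,
            Lrc::Arc(arc) => &**arc,
        }
    }
}
```

The union / thread-local variant would avoid storing a tag in the value itself, but every clone would then depend on runtime state.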

So, if you think we should take some alternative approach, then don't merge this PR. But if the decision is to stick with `Arc`, then please merge.

cc "Parallel Rustc Front-end" ( #113349 )

r? SparrowLii

`@rustbot` label WG-compiler-parallel
bors committed Feb 5, 2025
2 parents 8df89d1 + 0a21f1d commit c1d3b4f
Showing 77 changed files with 405 additions and 395 deletions.
8 changes: 4 additions & 4 deletions compiler/rustc_ast/src/ast.rs
@@ -19,6 +19,7 @@
//! - [`UnOp`], [`BinOp`], and [`BinOpKind`]: Unary and binary operators.
use std::borrow::Cow;
use std::sync::Arc;
use std::{cmp, fmt};

pub use GenericArgs::*;
@@ -27,7 +28,6 @@ pub use rustc_ast_ir::{Movability, Mutability, Pinnedness};
use rustc_data_structures::packed::Pu128;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::tagged_ptr::Tag;
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
pub use rustc_span::AttrId;
@@ -1611,7 +1611,7 @@ pub enum ExprKind {
/// Added for optimization purposes to avoid the need to escape
/// large binary blobs - should always behave like [`ExprKind::Lit`]
/// with a `ByteStr` literal.
IncludedBytes(Lrc<[u8]>),
IncludedBytes(Arc<[u8]>),

/// A `format_args!()` expression.
FormatArgs(P<FormatArgs>),
@@ -1904,9 +1904,9 @@ pub enum LitKind {
Str(Symbol, StrStyle),
/// A byte string (`b"foo"`). Not stored as a symbol because it might be
/// non-utf8, and symbols only allow utf8 strings.
ByteStr(Lrc<[u8]>, StrStyle),
ByteStr(Arc<[u8]>, StrStyle),
/// A C String (`c"foo"`). Guaranteed to only have `\0` at the end.
CStr(Lrc<[u8]>, StrStyle),
CStr(Arc<[u8]>, StrStyle),
/// A byte char (`b'f'`).
Byte(u8),
/// A character literal (`'a'`).
8 changes: 4 additions & 4 deletions compiler/rustc_ast/src/mut_visit.rs
@@ -9,10 +9,10 @@
use std::ops::DerefMut;
use std::panic;
use std::sync::Arc;

use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::Lrc;
use rustc_span::source_map::Spanned;
use rustc_span::{Ident, Span};
use smallvec::{Array, SmallVec, smallvec};
@@ -789,14 +789,14 @@ fn visit_tt<T: MutVisitor>(vis: &mut T, tt: &mut TokenTree) {
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
fn visit_tts<T: MutVisitor>(vis: &mut T, TokenStream(tts): &mut TokenStream) {
if T::VISIT_TOKENS && !tts.is_empty() {
let tts = Lrc::make_mut(tts);
let tts = Arc::make_mut(tts);
visit_vec(tts, |tree| visit_tt(vis, tree));
}
}

fn visit_attr_tts<T: MutVisitor>(vis: &mut T, AttrTokenStream(tts): &mut AttrTokenStream) {
if T::VISIT_TOKENS && !tts.is_empty() {
let tts = Lrc::make_mut(tts);
let tts = Arc::make_mut(tts);
visit_vec(tts, |tree| visit_attr_tt(vis, tree));
}
}
@@ -836,7 +836,7 @@ pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
vis.visit_ident(ident);
}
token::Interpolated(nt) => {
let nt = Lrc::make_mut(nt);
let nt = Arc::make_mut(nt);
visit_nonterminal(vis, nt);
}
_ => {}
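
A note on the `make_mut` calls above: `Arc::make_mut` has the same clone-on-write contract as the old `Lrc::make_mut` — it hands out `&mut` access, cloning the inner value only when the pointer is shared. A standalone sketch with plain `std` types (not rustc code):

```rust
use std::sync::Arc;

fn main() {
    let mut tokens: Arc<Vec<&str>> = Arc::new(vec!["a", "b"]);
    let other_handle = Arc::clone(&tokens); // the stream is now shared

    // Clones the Vec because it is shared, then mutates our private copy.
    Arc::make_mut(&mut tokens).push("c");

    assert_eq!(*tokens, vec!["a", "b", "c"]);
    assert_eq!(*other_handle, vec!["a", "b"]); // the other handle is untouched
}
```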
6 changes: 3 additions & 3 deletions compiler/rustc_ast/src/token.rs
@@ -1,5 +1,6 @@
use std::borrow::Cow;
use std::fmt;
use std::sync::Arc;

pub use BinOpToken::*;
pub use LitKind::*;
@@ -8,7 +9,6 @@ pub use NtExprKind::*;
pub use NtPatKind::*;
pub use TokenKind::*;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
use rustc_span::edition::Edition;
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
@@ -451,7 +451,7 @@ pub enum TokenKind {
/// The span in the surrounding `Token` is that of the metavariable in the
/// macro's RHS. The span within the Nonterminal is that of the fragment
/// passed to the macro at the call site.
Interpolated(Lrc<Nonterminal>),
Interpolated(Arc<Nonterminal>),

/// A doc comment token.
/// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
@@ -469,7 +469,7 @@ impl Clone for TokenKind {
// a copy. This is faster than the `derive(Clone)` version which has a
// separate path for every variant.
match self {
Interpolated(nt) => Interpolated(Lrc::clone(nt)),
Interpolated(nt) => Interpolated(Arc::clone(nt)),
_ => unsafe { std::ptr::read(self) },
}
}
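
A note on the `Clone` fast path above: `Lrc::clone(nt)` → `Arc::clone(nt)` swaps one cheap operation for another — a reference-count increment (now atomic), not a deep copy of the interpolated nonterminal. A minimal standalone illustration (plain `std`, nothing rustc-specific):

```rust
use std::sync::Arc;

fn main() {
    let nt = Arc::new(String::from("interpolated fragment"));
    let copy = Arc::clone(&nt);       // bumps the atomic refcount, no deep copy
    assert!(Arc::ptr_eq(&nt, &copy)); // both handles point at the same allocation
    assert_eq!(Arc::strong_count(&nt), 2);
}
```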
25 changes: 13 additions & 12 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -14,10 +14,11 @@
//! ownership of the original.
use std::borrow::Cow;
use std::sync::Arc;
use std::{cmp, fmt, iter};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{self, Lrc};
use rustc_data_structures::sync;
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
@@ -119,11 +120,11 @@ impl ToAttrTokenStream for AttrTokenStream {
/// of an actual `TokenStream` until it is needed.
/// `Box` is here only to reduce the structure size.
#[derive(Clone)]
pub struct LazyAttrTokenStream(Lrc<Box<dyn ToAttrTokenStream>>);
pub struct LazyAttrTokenStream(Arc<Box<dyn ToAttrTokenStream>>);

impl LazyAttrTokenStream {
pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
LazyAttrTokenStream(Lrc::new(Box::new(inner)))
LazyAttrTokenStream(Arc::new(Box::new(inner)))
}

pub fn to_attr_token_stream(&self) -> AttrTokenStream {
@@ -160,7 +161,7 @@ impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
/// during expansion to perform early cfg-expansion, and to process attributes
/// during proc-macro invocations.
#[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);
pub struct AttrTokenStream(pub Arc<Vec<AttrTokenTree>>);

/// Like `TokenTree`, but for `AttrTokenStream`.
#[derive(Clone, Debug, Encodable, Decodable)]
@@ -175,7 +176,7 @@ pub enum AttrTokenTree {

impl AttrTokenStream {
pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
AttrTokenStream(Lrc::new(tokens))
AttrTokenStream(Arc::new(tokens))
}

/// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`. During
@@ -293,7 +294,7 @@ pub struct AttrsTarget {
/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
/// backwards compatibility.
#[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);

/// Indicates whether a token can join with the following token to form a
/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
@@ -412,7 +413,7 @@ impl PartialEq<TokenStream> for TokenStream {

impl TokenStream {
pub fn new(tts: Vec<TokenTree>) -> TokenStream {
TokenStream(Lrc::new(tts))
TokenStream(Arc::new(tts))
}

pub fn is_empty(&self) -> bool {
@@ -544,7 +545,7 @@ impl TokenStream {
/// Push `tt` onto the end of the stream, possibly gluing it to the last
/// token. Uses `make_mut` to maximize efficiency.
pub fn push_tree(&mut self, tt: TokenTree) {
let vec_mut = Lrc::make_mut(&mut self.0);
let vec_mut = Arc::make_mut(&mut self.0);

if Self::try_glue_to_last(vec_mut, &tt) {
// nothing else to do
@@ -557,7 +558,7 @@
/// token tree to the last token. (No other token trees will be glued.)
/// Uses `make_mut` to maximize efficiency.
pub fn push_stream(&mut self, stream: TokenStream) {
let vec_mut = Lrc::make_mut(&mut self.0);
let vec_mut = Arc::make_mut(&mut self.0);

let stream_iter = stream.0.iter().cloned();

@@ -577,7 +578,7 @@
}

/// Desugar doc comments like `/// foo` in the stream into `#[doc =
/// r"foo"]`. Modifies the `TokenStream` via `Lrc::make_mut`, but as little
/// r"foo"]`. Modifies the `TokenStream` via `Arc::make_mut`, but as little
/// as possible.
pub fn desugar_doc_comments(&mut self) {
if let Some(desugared_stream) = desugar_inner(self.clone()) {
@@ -596,7 +597,7 @@
) => {
let desugared = desugared_tts(attr_style, data, span);
let desugared_len = desugared.len();
Lrc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
Arc::make_mut(&mut stream.0).splice(i..i + 1, desugared);
modified = true;
i += desugared_len;
}
@@ -607,7 +608,7 @@
if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
let new_tt =
TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
Lrc::make_mut(&mut stream.0)[i] = new_tt;
Arc::make_mut(&mut stream.0)[i] = new_tt;
modified = true;
}
i += 1;
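
A note on `LazyAttrTokenStream(Arc<Box<dyn ToAttrTokenStream>>)` above: as the source comment says, the inner `Box` exists only to keep the structure small — `Arc<dyn Trait>` would be a fat (two-word) pointer, while `Arc<Box<dyn Trait>>` stays thin. A standalone sketch of that layout difference (illustrative trait name, not the real one):

```rust
use std::mem::size_of;
use std::sync::Arc;

trait ToTokens {} // stand-in trait, just for the size comparison

fn main() {
    // Thin outer pointer: the Box (itself a fat pointer) lives inside the Arc's allocation.
    assert_eq!(size_of::<Arc<Box<dyn ToTokens>>>(), size_of::<usize>());
    // Fat pointer: data pointer + vtable pointer travel together.
    assert_eq!(size_of::<Arc<dyn ToTokens>>(), 2 * size_of::<usize>());
}
```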
4 changes: 2 additions & 2 deletions compiler/rustc_ast/src/util/literal.rs
@@ -121,7 +121,7 @@ impl LitKind {
}
token::ByteStrRaw(n) => {
// Raw strings have no escapes so we can convert the symbol
// directly to a `Lrc<u8>`.
// directly to a `Arc<u8>`.
let buf = symbol.as_str().to_owned().into_bytes();
LitKind::ByteStr(buf.into(), StrStyle::Raw(n))
}
@@ -142,7 +142,7 @@
}
token::CStrRaw(n) => {
// Raw strings have no escapes so we can convert the symbol
// directly to a `Lrc<u8>` after appending the terminating NUL
// directly to a `Arc<u8>` after appending the terminating NUL
// char.
let mut buf = symbol.as_str().to_owned().into_bytes();
buf.push(0);
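
A note on the hunks above: the `buf.into()` call that follows these comments relies on the standard `impl From<Vec<u8>> for Arc<[u8]>`, so the collected bytes end up as the shared `Arc<[u8]>` payload of `LitKind::ByteStr` / `LitKind::CStr`. A standalone sketch of that conversion (plain `std`, hypothetical literal contents):

```rust
use std::sync::Arc;

fn main() {
    // Miniature of the raw C-string arm: collect bytes, append the NUL, share them.
    let mut buf: Vec<u8> = b"raw bytes".to_vec();
    buf.push(0);
    let shared: Arc<[u8]> = buf.into(); // via `From<Vec<u8>> for Arc<[u8]>`
    assert_eq!(&shared[..], &b"raw bytes\0"[..]);
}
```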
22 changes: 11 additions & 11 deletions compiler/rustc_ast_lowering/src/expr.rs
@@ -1,11 +1,11 @@
use std::assert_matches::assert_matches;
use std::ops::ControlFlow;
use std::sync::Arc;

use rustc_ast::ptr::P as AstP;
use rustc_ast::*;
use rustc_ast_pretty::pprust::expr_to_string;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::Lrc;
use rustc_hir as hir;
use rustc_hir::HirId;
use rustc_hir::def::{DefKind, Res};
@@ -147,7 +147,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
ExprKind::IncludedBytes(bytes) => {
let lit = self.arena.alloc(respan(
self.lower_span(e.span),
LitKind::ByteStr(Lrc::clone(bytes), StrStyle::Cooked),
LitKind::ByteStr(Arc::clone(bytes), StrStyle::Cooked),
));
hir::ExprKind::Lit(lit)
}
@@ -599,15 +599,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
this.mark_span_with_reason(
DesugaringKind::TryBlock,
expr.span,
Some(Lrc::clone(&this.allow_try_trait)),
Some(Arc::clone(&this.allow_try_trait)),
),
expr,
)
} else {
let try_span = this.mark_span_with_reason(
DesugaringKind::TryBlock,
this.tcx.sess.source_map().end_point(body.span),
Some(Lrc::clone(&this.allow_try_trait)),
Some(Arc::clone(&this.allow_try_trait)),
);

(try_span, this.expr_unit(try_span))
@@ -716,7 +716,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason(
DesugaringKind::Async,
self.lower_span(span),
Some(Lrc::clone(&self.allow_gen_future)),
Some(Arc::clone(&self.allow_gen_future)),
);
let resume_ty =
self.make_lang_item_qpath(hir::LangItem::ResumeTy, unstable_span, None);
@@ -800,7 +800,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason(
DesugaringKind::Async,
span,
Some(Lrc::clone(&self.allow_gen_future)),
Some(Arc::clone(&self.allow_gen_future)),
);
self.lower_attrs(inner_hir_id, &[Attribute {
kind: AttrKind::Normal(ptr::P(NormalAttr::from_ident(Ident::new(
@@ -876,13 +876,13 @@ impl<'hir> LoweringContext<'_, 'hir> {

let features = match await_kind {
FutureKind::Future => None,
FutureKind::AsyncIterator => Some(Lrc::clone(&self.allow_for_await)),
FutureKind::AsyncIterator => Some(Arc::clone(&self.allow_for_await)),
};
let span = self.mark_span_with_reason(DesugaringKind::Await, await_kw_span, features);
let gen_future_span = self.mark_span_with_reason(
DesugaringKind::Await,
full_span,
Some(Lrc::clone(&self.allow_gen_future)),
Some(Arc::clone(&self.allow_gen_future)),
);
let expr_hir_id = expr.hir_id;

@@ -1926,13 +1926,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason(
DesugaringKind::QuestionMark,
span,
Some(Lrc::clone(&self.allow_try_trait)),
Some(Arc::clone(&self.allow_try_trait)),
);
let try_span = self.tcx.sess.source_map().end_point(span);
let try_span = self.mark_span_with_reason(
DesugaringKind::QuestionMark,
try_span,
Some(Lrc::clone(&self.allow_try_trait)),
Some(Arc::clone(&self.allow_try_trait)),
);

// `Try::branch(<expr>)`
@@ -2026,7 +2026,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let unstable_span = self.mark_span_with_reason(
DesugaringKind::YeetExpr,
span,
Some(Lrc::clone(&self.allow_try_trait)),
Some(Arc::clone(&self.allow_try_trait)),
);

let from_yeet_expr = self.wrap_in_try_constructor(
17 changes: 9 additions & 8 deletions compiler/rustc_ast_lowering/src/lib.rs
@@ -41,13 +41,14 @@
#![warn(unreachable_pub)]
// tidy-alphabetical-end

use std::sync::Arc;

use rustc_ast::node_id::NodeMap;
use rustc_ast::{self as ast, *};
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::sorted_map::SortedMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::tagged_ptr::TaggedRef;
use rustc_errors::{DiagArgFromDisplay, DiagCtxtHandle, StashKey};
use rustc_hir::def::{DefKind, LifetimeRes, Namespace, PartialRes, PerNS, Res};
@@ -129,11 +130,11 @@ struct LoweringContext<'a, 'hir> {
#[cfg(debug_assertions)]
node_id_to_local_id: NodeMap<hir::ItemLocalId>,

allow_try_trait: Lrc<[Symbol]>,
allow_gen_future: Lrc<[Symbol]>,
allow_async_iterator: Lrc<[Symbol]>,
allow_for_await: Lrc<[Symbol]>,
allow_async_fn_traits: Lrc<[Symbol]>,
allow_try_trait: Arc<[Symbol]>,
allow_gen_future: Arc<[Symbol]>,
allow_async_iterator: Arc<[Symbol]>,
allow_for_await: Arc<[Symbol]>,
allow_async_fn_traits: Arc<[Symbol]>,
}

impl<'a, 'hir> LoweringContext<'a, 'hir> {
@@ -722,7 +723,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
&self,
reason: DesugaringKind,
span: Span,
allow_internal_unstable: Option<Lrc<[Symbol]>>,
allow_internal_unstable: Option<Arc<[Symbol]>>,
) -> Span {
self.tcx.with_stable_hashing_context(|hcx| {
span.mark_with_reason(allow_internal_unstable, reason, span.edition(), hcx)
@@ -1666,7 +1667,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
CoroutineKind::Async { return_impl_trait_id, .. } => (return_impl_trait_id, None),
CoroutineKind::Gen { return_impl_trait_id, .. } => (return_impl_trait_id, None),
CoroutineKind::AsyncGen { return_impl_trait_id, .. } => {
(return_impl_trait_id, Some(Lrc::clone(&self.allow_async_iterator)))
(return_impl_trait_id, Some(Arc::clone(&self.allow_async_iterator)))
}
};

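
A note on the `allow_*: Arc<[Symbol]>` fields above (and the `Some(Arc::clone(&self.allow_…))` calls in the `expr.rs` hunks earlier): each list of allowed features is built once and then attached to many desugared spans, so cloning the handle is a refcount bump rather than a new allocation per span. A standalone sketch of the pattern, with `&'static str` standing in for `Symbol` (an assumption for illustration only):

```rust
use std::sync::Arc;

type Symbol = &'static str; // stand-in for `rustc_span::Symbol`, just for this sketch

struct Ctx {
    allow_gen_future: Arc<[Symbol]>, // one shared, immutable list
}

fn mark_span(allow_internal_unstable: Option<Arc<[Symbol]>>) {
    // ...would record the list on the span's expansion data...
    let _ = allow_internal_unstable;
}

fn main() {
    let ctx = Ctx { allow_gen_future: Arc::from(["gen_future"].as_slice()) };
    // Every desugaring site clones the handle, not the list itself.
    mark_span(Some(Arc::clone(&ctx.allow_gen_future)));
    mark_span(Some(Arc::clone(&ctx.allow_gen_future)));
    assert_eq!(Arc::strong_count(&ctx.allow_gen_future), 1); // temporaries already dropped
}
```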