Auto merge of #117881 - TaKO8Ki:rollup-n7jtmgj, r=TaKO8Ki
Rollup of 5 pull requests

Successful merges:

 - #117737 (Remove `-Zkeep-hygiene-data`.)
 - #117830 (Small improvements in object lifetime default code)
 - #117858 (Compute layout with spans for better cycle errors in coroutines)
 - #117863 (Remove some unused stuff from `rustc_index`)
 - #117872 (Cranelift isn't available on non-nightly channels)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 531cb83fcf
16 changed files with 39 additions and 114 deletions
@@ -792,8 +792,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
         // debuggers and debugger extensions expect it to be called `__awaitee`. They use
         // this name to identify what is being awaited by a suspended async functions.
         let awaitee_ident = Ident::with_dummy_span(sym::__awaitee);
-        let (awaitee_pat, awaitee_pat_hid) =
-            self.pat_ident_binding_mode(span, awaitee_ident, hir::BindingAnnotation::MUT);
+        let (awaitee_pat, awaitee_pat_hid) = self.pat_ident_binding_mode(
+            gen_future_span,
+            awaitee_ident,
+            hir::BindingAnnotation::MUT,
+        );
 
         let task_context_ident = Ident::with_dummy_span(sym::_task_context);
 
@@ -443,11 +443,6 @@ pub fn lower_to_hir(tcx: TyCtxt<'_>, (): ()) -> hir::Crate<'_> {
     drop(ast_index);
     sess.time("drop_ast", || drop(krate));
 
-    // Discard hygiene data, which isn't required after lowering to HIR.
-    if !sess.opts.unstable_opts.keep_hygiene_data {
-        rustc_span::hygiene::clear_syntax_context_map();
-    }
-
     // Don't hash unless necessary, because it's expensive.
     let opt_hir_hash =
         if tcx.needs_crate_hash() { Some(compute_hir_hash(tcx, &owners)) } else { None };
@@ -2847,6 +2847,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
     /// provided, if they provided one, and otherwise search the supertypes of trait bounds
     /// for region bounds. It may be that we can derive no bound at all, in which case
     /// we return `None`.
+    #[instrument(level = "debug", skip(self, span), ret)]
     fn compute_object_lifetime_bound(
         &self,
         span: Span,
@@ -2855,8 +2856,6 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
     {
         let tcx = self.tcx();
 
-        debug!("compute_opt_region_bound(existential_predicates={:?})", existential_predicates);
-
         // No explicit region bound specified. Therefore, examine trait
         // bounds and see if we can derive region bounds from those.
         let derived_region_bounds = object_region_bounds(tcx, existential_predicates);
@@ -1848,8 +1848,8 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
         }
     }
 
+    #[instrument(level = "debug", skip(self))]
     fn resolve_object_lifetime_default(&mut self, lifetime_ref: &'tcx hir::Lifetime) {
-        debug!("resolve_object_lifetime_default(lifetime_ref={:?})", lifetime_ref);
         let mut late_depth = 0;
         let mut scope = self.scope;
         let lifetime = loop {
@@ -237,23 +237,12 @@ impl<T: Idx> BitSet<T> {
         new_word != word
     }
 
-    /// Gets a slice of the underlying words.
-    pub fn words(&self) -> &[Word] {
-        &self.words
-    }
-
     /// Iterates over the indices of set bits in a sorted order.
     #[inline]
     pub fn iter(&self) -> BitIter<'_, T> {
         BitIter::new(&self.words)
     }
 
-    /// Duplicates the set as a hybrid set.
-    pub fn to_hybrid(&self) -> HybridBitSet<T> {
-        // Note: we currently don't bother trying to make a Sparse set.
-        HybridBitSet::Dense(self.to_owned())
-    }
-
     /// Set `self = self | other`. In contrast to `union` returns `true` if the set contains at
     /// least one bit that is not in `other` (i.e. `other` is not a superset of `self`).
     ///
@@ -1601,11 +1590,11 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> {
     pub fn from_row_n(row: &BitSet<C>, num_rows: usize) -> BitMatrix<R, C> {
         let num_columns = row.domain_size();
         let words_per_row = num_words(num_columns);
-        assert_eq!(words_per_row, row.words().len());
+        assert_eq!(words_per_row, row.words.len());
         BitMatrix {
             num_rows,
             num_columns,
-            words: iter::repeat(row.words()).take(num_rows).flatten().cloned().collect(),
+            words: iter::repeat(&row.words).take(num_rows).flatten().cloned().collect(),
             marker: PhantomData,
         }
     }
@@ -1700,9 +1689,9 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> {
         assert_eq!(with.domain_size(), self.num_columns);
         let (write_start, write_end) = self.range(write);
         let mut changed = false;
-        for (read_index, write_index) in iter::zip(0..with.words().len(), write_start..write_end) {
+        for (read_index, write_index) in iter::zip(0..with.words.len(), write_start..write_end) {
             let word = self.words[write_index];
-            let new_word = word | with.words()[read_index];
+            let new_word = word | with.words[read_index];
             self.words[write_index] = new_word;
             changed |= word != new_word;
         }
@@ -2002,54 +1991,6 @@ impl std::fmt::Debug for FiniteBitSet<u32> {
     }
 }
 
-impl FiniteBitSetTy for u64 {
-    const DOMAIN_SIZE: u32 = 64;
-
-    const FILLED: Self = Self::MAX;
-    const EMPTY: Self = Self::MIN;
-
-    const ONE: Self = 1u64;
-    const ZERO: Self = 0u64;
-
-    fn checked_shl(self, rhs: u32) -> Option<Self> {
-        self.checked_shl(rhs)
-    }
-
-    fn checked_shr(self, rhs: u32) -> Option<Self> {
-        self.checked_shr(rhs)
-    }
-}
-
-impl std::fmt::Debug for FiniteBitSet<u64> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:064b}", self.0)
-    }
-}
-
-impl FiniteBitSetTy for u128 {
-    const DOMAIN_SIZE: u32 = 128;
-
-    const FILLED: Self = Self::MAX;
-    const EMPTY: Self = Self::MIN;
-
-    const ONE: Self = 1u128;
-    const ZERO: Self = 0u128;
-
-    fn checked_shl(self, rhs: u32) -> Option<Self> {
-        self.checked_shl(rhs)
-    }
-
-    fn checked_shr(self, rhs: u32) -> Option<Self> {
-        self.checked_shr(rhs)
-    }
-}
-
-impl std::fmt::Debug for FiniteBitSet<u128> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:0128b}", self.0)
-    }
-}
-
 /// A fixed-sized bitset type represented by an integer type. Indices outwith than the range
 /// representable by `T` are considered set.
 #[derive(Copy, Clone, Eq, PartialEq, Decodable, Encodable)]
@@ -137,10 +137,6 @@ impl<I: Idx, T> IndexVec<I, T> {
         self.raw.truncate(a)
     }
 
-    pub fn convert_index_type<Ix: Idx>(self) -> IndexVec<Ix, T> {
-        IndexVec::from_raw(self.raw)
-    }
-
     /// Grows the index vector so that it contains an entry for
     /// `elem`; if that is already true, then has no
     /// effect. Otherwise, inserts new values as needed by invoking
@@ -1,5 +1,3 @@
-#![allow(dead_code)]
-
 // Allows the macro invocation below to work
 use crate as rustc_index;
 
@@ -679,7 +679,6 @@ fn test_unstable_options_tracking_hash() {
     untracked!(incremental_info, true);
    untracked!(incremental_verify_ich, true);
    untracked!(input_stats, true);
-    untracked!(keep_hygiene_data, true);
    untracked!(link_native_libraries, false);
    untracked!(llvm_time_trace, true);
    untracked!(ls, vec!["all".to_owned()]);
@@ -1621,8 +1621,6 @@ options! {
         `=skip-entry`
         `=skip-exit`
         Multiple options can be combined with commas."),
-    keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED],
-        "keep hygiene data after analysis (default: no)"),
     layout_seed: Option<u64> = (None, parse_opt_number, [TRACKED],
         "seed layout randomization"),
     link_directives: bool = (true, parse_bool, [TRACKED],
@@ -569,10 +569,6 @@ impl HygieneData {
     }
 }
 
-pub fn clear_syntax_context_map() {
-    HygieneData::with(|data| data.syntax_context_map = FxHashMap::default());
-}
-
 pub fn walk_chain(span: Span, to: SyntaxContext) -> Span {
     HygieneData::with(|data| data.walk_chain(span, to))
 }
@@ -913,20 +913,15 @@ pub fn object_region_bounds<'tcx>(
     tcx: TyCtxt<'tcx>,
     existential_predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
 ) -> Vec<ty::Region<'tcx>> {
-    // Since we don't actually *know* the self type for an object,
-    // this "open(err)" serves as a kind of dummy standin -- basically
-    // a placeholder type.
-    let open_ty = Ty::new_fresh(tcx, 0);
-
     let predicates = existential_predicates.iter().filter_map(|predicate| {
         if let ty::ExistentialPredicate::Projection(_) = predicate.skip_binder() {
             None
         } else {
-            Some(predicate.with_self_ty(tcx, open_ty))
+            Some(predicate.with_self_ty(tcx, tcx.types.trait_object_dummy_self))
         }
     });
 
-    required_region_bounds(tcx, open_ty, predicates)
+    required_region_bounds(tcx, tcx.types.trait_object_dummy_self, predicates)
 }
 
 /// Given a set of predicates that apply to an object type, returns
@@ -740,11 +740,11 @@ fn coroutine_layout<'tcx>(
     };
     let tag_layout = cx.tcx.mk_layout(LayoutS::scalar(cx, tag));
 
-    let promoted_layouts = ineligible_locals
-        .iter()
-        .map(|local| subst_field(info.field_tys[local].ty))
-        .map(|ty| Ty::new_maybe_uninit(tcx, ty))
-        .map(|ty| Ok(cx.layout_of(ty)?.layout));
+    let promoted_layouts = ineligible_locals.iter().map(|local| {
+        let field_ty = subst_field(info.field_tys[local].ty);
+        let uninit_ty = Ty::new_maybe_uninit(tcx, field_ty);
+        Ok(cx.spanned_layout_of(uninit_ty, info.field_tys[local].source_info.span)?.layout)
+    });
     let prefix_layouts = args
         .as_coroutine()
         .prefix_tys()
@@ -1630,14 +1630,11 @@ impl Step for Extended {
         prepare("rust-analysis");
         prepare("clippy");
         prepare("rust-analyzer");
-        for tool in &["rust-docs", "rust-demangler", "miri"] {
+        for tool in &["rust-docs", "rust-demangler", "miri", "rustc-codegen-cranelift"] {
             if built_tools.contains(tool) {
                 prepare(tool);
             }
         }
-        if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("cranelift")) {
-            prepare("rustc-codegen-cranelift");
-        }
         // create an 'uninstall' package
         builder.install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
         pkgbuild("uninstall");
@@ -14,7 +14,7 @@
                 Static,
             ),
             source_info: SourceInfo {
-                span: $DIR/async_await.rs:16:9: 16:14 (#8),
+                span: $DIR/async_await.rs:16:5: 16:14 (#9),
                 scope: scope[0],
             },
             ignore_for_traits: false,
@@ -32,7 +32,7 @@
                 Static,
             ),
             source_info: SourceInfo {
-                span: $DIR/async_await.rs:17:9: 17:14 (#10),
+                span: $DIR/async_await.rs:17:5: 17:14 (#11),
                 scope: scope[0],
             },
             ignore_for_traits: false,
@@ -1,6 +1,5 @@
 // edition: 2021
 // build-fail
-//~^^ ERROR cycle detected when computing layout of
 
 #![feature(impl_trait_in_assoc_type)]
 
@@ -21,6 +20,7 @@ impl Recur for () {
 
     fn recur(self) -> Self::Recur {
         async move { recur(self).await; }
+        //~^ ERROR cycle detected when computing layout of
     }
 }
 
@@ -1,14 +1,22 @@
-error[E0391]: cycle detected when computing layout of `{async block@$DIR/indirect-recursion-issue-112047.rs:23:9: 23:42}`
+error[E0391]: cycle detected when computing layout of `{async block@$DIR/indirect-recursion-issue-112047.rs:22:9: 22:42}`
+  --> $DIR/indirect-recursion-issue-112047.rs:22:22
    |
-   = note: ...which requires computing layout of `core::mem::maybe_uninit::MaybeUninit<{async fn body@$DIR/indirect-recursion-issue-112047.rs:15:31: 17:2}>`...
-   = note: ...which requires computing layout of `core::mem::manually_drop::ManuallyDrop<{async fn body@$DIR/indirect-recursion-issue-112047.rs:15:31: 17:2}>`...
-   = note: ...which requires computing layout of `{async fn body@$DIR/indirect-recursion-issue-112047.rs:15:31: 17:2}`...
+LL | async move { recur(self).await; }
+   |              ^^^^^^^^^^^^^^^^^
+   |
+   = note: ...which requires computing layout of `core::mem::maybe_uninit::MaybeUninit<{async fn body@$DIR/indirect-recursion-issue-112047.rs:14:31: 16:2}>`...
+   = note: ...which requires computing layout of `core::mem::manually_drop::ManuallyDrop<{async fn body@$DIR/indirect-recursion-issue-112047.rs:14:31: 16:2}>`...
+note: ...which requires computing layout of `{async fn body@$DIR/indirect-recursion-issue-112047.rs:14:31: 16:2}`...
+  --> $DIR/indirect-recursion-issue-112047.rs:15:5
+   |
+LL | t.recur().await;
+   | ^^^^^^^^^^^^^^^
    = note: ...which requires computing layout of `core::mem::maybe_uninit::MaybeUninit<<() as Recur>::Recur>`...
-   = note: ...which requires computing layout of `core::mem::maybe_uninit::MaybeUninit<{async block@$DIR/indirect-recursion-issue-112047.rs:23:9: 23:42}>`...
-   = note: ...which requires computing layout of `core::mem::manually_drop::ManuallyDrop<{async block@$DIR/indirect-recursion-issue-112047.rs:23:9: 23:42}>`...
-   = note: ...which again requires computing layout of `{async block@$DIR/indirect-recursion-issue-112047.rs:23:9: 23:42}`, completing the cycle
-note: cycle used when elaborating drops for `<impl at $DIR/indirect-recursion-issue-112047.rs:19:1: 19:18>::recur`
-  --> $DIR/indirect-recursion-issue-112047.rs:22:5
+   = note: ...which requires computing layout of `core::mem::maybe_uninit::MaybeUninit<{async block@$DIR/indirect-recursion-issue-112047.rs:22:9: 22:42}>`...
+   = note: ...which requires computing layout of `core::mem::manually_drop::ManuallyDrop<{async block@$DIR/indirect-recursion-issue-112047.rs:22:9: 22:42}>`...
+   = note: ...which again requires computing layout of `{async block@$DIR/indirect-recursion-issue-112047.rs:22:9: 22:42}`, completing the cycle
+note: cycle used when elaborating drops for `<impl at $DIR/indirect-recursion-issue-112047.rs:18:1: 18:18>::recur`
+  --> $DIR/indirect-recursion-issue-112047.rs:21:5
    |
 LL | fn recur(self) -> Self::Recur {
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^