Auto merge of #107811 - matthiaskrgr:rollup-rpjzshk, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #105641 (Implement cursors for BTreeMap)
 - #107271 (Treat Drop as a rmw operation)
 - #107710 (Update strip-ansi-escapes and vte)
 - #107758 (Change `arena_cache` to not alter the declared query result)
 - #107777 (Make `derive_const` derive properly const-if-const impls)
 - #107780 (Rename `replace_bound_vars_with_*` to `instantiate_binder_with_*`)
 - #107793 (Add missing tracking issue for `RawOsError`)
 - #107807 (Fix small debug typo)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit ef934d9b63
53 changed files with 1248 additions and 210 deletions
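Several of the hunks below (the `ExtCtxt::trait_bound` and `TraitBoundModifier` changes) come from #107777 (`derive_const` const-if-const impls). As a rough, hypothetical illustration of the intent (this snippet is not part of the commit, and the exact expansion shown is an assumption), a `derive_const` derive is expected to put `~const` bounds on the generated impl so it stays usable in const contexts:

    #![feature(derive_const, const_trait_impl)]

    #[derive_const(Default)]
    struct Point<T> {
        x: T,
        y: T,
    }

    // expands roughly to:
    // impl<T: ~const Default> const Default for Point<T> {
    //     fn default() -> Self { Point { x: T::default(), y: T::default() } }
    // }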
Cargo.lock (38 changed lines)
@@ -127,6 +127,12 @@ version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f8cb5d814eb646a863c4f24978cff2880c4be96ad8cde2c0f0678732902e271"
 
+[[package]]
+name = "arrayvec"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
+
 [[package]]
 name = "arrayvec"
 version = "0.7.0"

@@ -791,7 +797,7 @@ dependencies = [
 name = "clippy_utils"
 version = "0.1.69"
 dependencies = [
- "arrayvec",
+ "arrayvec 0.7.0",
  "if_chain",
  "itertools",
  "rustc-semver",

@@ -3912,7 +3918,7 @@ dependencies = [
 name = "rustc_data_structures"
 version = "0.0.0"
 dependencies = [
- "arrayvec",
+ "arrayvec 0.7.0",
  "bitflags",
  "cfg-if",
  "ena",

@@ -4169,7 +4175,7 @@ dependencies = [
 name = "rustc_index"
 version = "0.0.0"
 dependencies = [
- "arrayvec",
+ "arrayvec 0.7.0",
  "rustc_macros",
  "rustc_serialize",
  "smallvec",

@@ -4866,7 +4872,7 @@ dependencies = [
 name = "rustdoc"
 version = "0.0.0"
 dependencies = [
- "arrayvec",
+ "arrayvec 0.7.0",
  "askama",
  "expect-test",
  "itertools",

@@ -5375,9 +5381,9 @@ dependencies = [
 
 [[package]]
 name = "strip-ansi-escapes"
-version = "0.1.0"
+version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d63676e2abafa709460982ddc02a3bb586b6d15a49b75c212e06edd3933acee"
+checksum = "011cbb39cf7c1f62871aea3cc46e5817b0937b49e9447370c93cacbe93a766d8"
 dependencies = [
  "vte",
 ]

@@ -6089,9 +6095,9 @@ checksum = "05e42f7c18b8f902290b009cde6d651262f956c98bc51bca4cd1d511c9cd85c7"
 
 [[package]]
 name = "utf8parse"
-version = "0.1.1"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d"
+checksum = "936e4b492acfd135421d8dca4b1aa80a7bfc26e702ef3af710e0752684df5372"
 
 [[package]]
 name = "uuid"

@@ -6122,11 +6128,23 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
 [[package]]
 name = "vte"
-version = "0.3.3"
+version = "0.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f42f536e22f7fcbb407639765c8fd78707a33109301f834a594758bedd6e8cf"
+checksum = "6cbce692ab4ca2f1f3047fcf732430249c0e971bfdd2b234cf2c47ad93af5983"
 dependencies = [
+ "arrayvec 0.5.2",
  "utf8parse",
+ "vte_generate_state_changes",
+]
+
+[[package]]
+name = "vte_generate_state_changes"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff"
+dependencies = [
+ "proc-macro2",
+ "quote",
 ]
 
 [[package]]

@@ -1567,8 +1567,18 @@ impl<'a> State<'a> {
 
 match bound {
     GenericBound::Trait(tref, modifier) => {
-        if modifier == &TraitBoundModifier::Maybe {
-            self.word("?");
+        match modifier {
+            TraitBoundModifier::None => {}
+            TraitBoundModifier::Maybe => {
+                self.word("?");
+            }
+            TraitBoundModifier::MaybeConst => {
+                self.word_space("~const");
+            }
+            TraitBoundModifier::MaybeConstMaybe => {
+                self.word_space("~const");
+                self.word("?");
+            }
         }
         self.print_poly_trait_ref(tref);
     }

@@ -1139,7 +1139,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 if let ty::Adt(def, substs) = ty.kind()
     && Some(def.did()) == tcx.lang_items().pin_type()
     && let ty::Ref(_, _, hir::Mutability::Mut) = substs.type_at(0).kind()
-    && let self_ty = infcx.replace_bound_vars_with_fresh_vars(
+    && let self_ty = infcx.instantiate_binder_with_fresh_vars(
         fn_call_span,
         LateBoundRegionConversionTime::FnCall,
         tcx.fn_sig(method_did).subst(tcx, method_substs).input(0),

@@ -38,7 +38,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 // so that they represent the view from "inside" the closure.
 let user_provided_sig = self
     .instantiate_canonical_with_fresh_inference_vars(body.span, &user_provided_poly_sig);
-let user_provided_sig = self.infcx.replace_bound_vars_with_fresh_vars(
+let user_provided_sig = self.infcx.instantiate_binder_with_fresh_vars(
     body.span,
     LateBoundRegionConversionTime::FnCall,
     user_provided_sig,

@@ -153,7 +153,10 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
 let path_debug = cx.path_global(span, cx.std_path(&[sym::fmt, sym::Debug]));
 let ty_dyn_debug = cx.ty(
     span,
-    ast::TyKind::TraitObject(vec![cx.trait_bound(path_debug)], ast::TraitObjectSyntax::Dyn),
+    ast::TyKind::TraitObject(
+        vec![cx.trait_bound(path_debug, false)],
+        ast::TraitObjectSyntax::Dyn,
+    ),
 );
 let ty_slice = cx.ty(
     span,

@@ -605,18 +605,26 @@ impl<'a> TraitDef<'a> {
 let bounds: Vec<_> = self
     .additional_bounds
     .iter()
-    .map(|p| cx.trait_bound(p.to_path(cx, self.span, type_ident, generics)))
+    .map(|p| {
+        cx.trait_bound(
+            p.to_path(cx, self.span, type_ident, generics),
+            self.is_const,
+        )
+    })
     .chain(
         // Add a bound for the current trait.
         self.skip_path_as_bound
             .not()
-            .then(|| cx.trait_bound(trait_path.clone())),
+            .then(|| cx.trait_bound(trait_path.clone(), self.is_const)),
     )
     .chain({
         // Add a `Copy` bound if required.
         if is_packed && self.needs_copy_as_bound_if_packed {
             let p = deriving::path_std!(marker::Copy);
-            Some(cx.trait_bound(p.to_path(cx, self.span, type_ident, generics)))
+            Some(cx.trait_bound(
+                p.to_path(cx, self.span, type_ident, generics),
+                self.is_const,
+            ))
         } else {
             None
         }

@@ -694,18 +702,24 @@ impl<'a> TraitDef<'a> {
 let mut bounds: Vec<_> = self
     .additional_bounds
     .iter()
-    .map(|p| cx.trait_bound(p.to_path(cx, self.span, type_ident, generics)))
+    .map(|p| {
+        cx.trait_bound(
+            p.to_path(cx, self.span, type_ident, generics),
+            self.is_const,
+        )
+    })
     .collect();
 
 // Require the current trait.
-bounds.push(cx.trait_bound(trait_path.clone()));
+bounds.push(cx.trait_bound(trait_path.clone(), self.is_const));
 
 // Add a `Copy` bound if required.
 if is_packed && self.needs_copy_as_bound_if_packed {
     let p = deriving::path_std!(marker::Copy);
-    bounds.push(
-        cx.trait_bound(p.to_path(cx, self.span, type_ident, generics)),
-    );
+    bounds.push(cx.trait_bound(
+        p.to_path(cx, self.span, type_ident, generics),
+        self.is_const,
+    ));
 }
 
 let predicate = ast::WhereBoundPredicate {

@@ -154,7 +154,7 @@ fn mk_ty_param(
 .iter()
 .map(|b| {
     let path = b.to_path(cx, span, self_ident, self_generics);
-    cx.trait_bound(path)
+    cx.trait_bound(path, false)
 })
 .collect();
 cx.typaram(span, Ident::new(name, span), bounds, None)

@@ -131,10 +131,14 @@ impl<'a> ExtCtxt<'a> {
     }
 }
 
-pub fn trait_bound(&self, path: ast::Path) -> ast::GenericBound {
+pub fn trait_bound(&self, path: ast::Path, is_const: bool) -> ast::GenericBound {
     ast::GenericBound::Trait(
         self.poly_trait_ref(path.span, path),
-        ast::TraitBoundModifier::None,
+        if is_const {
+            ast::TraitBoundModifier::MaybeConst
+        } else {
+            ast::TraitBoundModifier::None
+        },
     )
 }
 
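A minimal usage sketch of the new `trait_bound` signature above (illustrative only; `cx: &ExtCtxt<'_>` and `path: ast::Path` are assumed to already be in scope):

    // Plain bound, as before:
    let bound = cx.trait_bound(path.clone(), false); // TraitBoundModifier::None
    // Const-if-const bound, as emitted by `derive_const`:
    let const_bound = cx.trait_bound(path, true); // TraitBoundModifier::MaybeConst
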
@@ -246,7 +246,7 @@ fn compare_method_predicate_entailment<'tcx>(
 
 let mut wf_tys = FxIndexSet::default();
 
-let unnormalized_impl_sig = infcx.replace_bound_vars_with_fresh_vars(
+let unnormalized_impl_sig = infcx.instantiate_binder_with_fresh_vars(
     impl_m_span,
     infer::HigherRankedType,
     tcx.fn_sig(impl_m.def_id).subst_identity(),

@@ -640,7 +640,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
 let impl_sig = ocx.normalize(
     &norm_cause,
     param_env,
-    infcx.replace_bound_vars_with_fresh_vars(
+    infcx.instantiate_binder_with_fresh_vars(
         return_span,
         infer::HigherRankedType,
         tcx.fn_sig(impl_m.def_id).subst_identity(),

@@ -156,7 +156,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // fnmut vs fnonce. If so, we have to defer further processing.
 if self.closure_kind(substs).is_none() {
     let closure_sig = substs.as_closure().sig();
-    let closure_sig = self.replace_bound_vars_with_fresh_vars(
+    let closure_sig = self.instantiate_binder_with_fresh_vars(
         call_expr.span,
         infer::FnCall,
         closure_sig,

@@ -437,7 +437,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // renormalize the associated types at this point, since they
 // previously appeared within a `Binder<>` and hence would not
 // have been normalized before.
-let fn_sig = self.replace_bound_vars_with_fresh_vars(call_expr.span, infer::FnCall, fn_sig);
+let fn_sig = self.instantiate_binder_with_fresh_vars(call_expr.span, infer::FnCall, fn_sig);
 let fn_sig = self.normalize(call_expr.span, fn_sig);
 
 // Call the generic checker.

@@ -544,7 +544,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 )
 .map(|(hir_ty, &supplied_ty)| {
     // Instantiate (this part of..) S to S', i.e., with fresh variables.
-    self.replace_bound_vars_with_fresh_vars(
+    self.instantiate_binder_with_fresh_vars(
         hir_ty.span,
         LateBoundRegionConversionTime::FnCall,
         // (*) binder moved to here

@@ -566,7 +566,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 all_obligations.extend(obligations);
 }
 
-let supplied_output_ty = self.replace_bound_vars_with_fresh_vars(
+let supplied_output_ty = self.instantiate_binder_with_fresh_vars(
     decl.output.span(),
     LateBoundRegionConversionTime::FnCall,
     supplied_sig.output(),

@@ -568,7 +568,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // placeholder lifetimes with probing, we just replace higher lifetimes
 // with fresh vars.
 let span = args.get(i).map(|a| a.span).unwrap_or(expr.span);
-let input = self.replace_bound_vars_with_fresh_vars(
+let input = self.instantiate_binder_with_fresh_vars(
     span,
     infer::LateBoundRegionConversionTime::FnCall,
     fn_sig.input(i),

@@ -586,7 +586,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // Also, as we just want to check sizedness, instead of introducing
 // placeholder lifetimes with probing, we just replace higher lifetimes
 // with fresh vars.
-let output = self.replace_bound_vars_with_fresh_vars(
+let output = self.instantiate_binder_with_fresh_vars(
     expr.span,
     infer::LateBoundRegionConversionTime::FnCall,
     fn_sig.output(),

@@ -289,7 +289,7 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
 item_segment: &hir::PathSegment<'_>,
 poly_trait_ref: ty::PolyTraitRef<'tcx>,
 ) -> Ty<'tcx> {
-let trait_ref = self.replace_bound_vars_with_fresh_vars(
+let trait_ref = self.instantiate_binder_with_fresh_vars(
     span,
     infer::LateBoundRegionConversionTime::AssocTypeProjection(item_def_id),
     poly_trait_ref,

@@ -262,7 +262,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
 let original_poly_trait_ref = principal.with_self_ty(this.tcx, object_ty);
 let upcast_poly_trait_ref = this.upcast(original_poly_trait_ref, trait_def_id);
 let upcast_trait_ref =
-    this.replace_bound_vars_with_fresh_vars(upcast_poly_trait_ref);
+    this.instantiate_binder_with_fresh_vars(upcast_poly_trait_ref);
 debug!(
     "original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}",
     original_poly_trait_ref, upcast_trait_ref, trait_def_id

@@ -285,7 +285,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
 probe::WhereClausePick(poly_trait_ref) => {
     // Where clauses can have bound regions in them. We need to instantiate
     // those to convert from a poly-trait-ref to a trait-ref.
-    self.replace_bound_vars_with_fresh_vars(poly_trait_ref).substs
+    self.instantiate_binder_with_fresh_vars(poly_trait_ref).substs
 }
 }
 }

@@ -506,7 +506,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
 let sig = self.tcx.fn_sig(def_id).subst(self.tcx, all_substs);
 debug!("type scheme substituted, sig={:?}", sig);
 
-let sig = self.replace_bound_vars_with_fresh_vars(sig);
+let sig = self.instantiate_binder_with_fresh_vars(sig);
 debug!("late-bound lifetimes from method instantiated, sig={:?}", sig);
 
 (sig, method_predicates)

@@ -625,10 +625,10 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
 upcast_trait_refs.into_iter().next().unwrap()
 }
 
-fn replace_bound_vars_with_fresh_vars<T>(&self, value: ty::Binder<'tcx, T>) -> T
+fn instantiate_binder_with_fresh_vars<T>(&self, value: ty::Binder<'tcx, T>) -> T
 where
     T: TypeFoldable<'tcx> + Copy,
 {
-    self.fcx.replace_bound_vars_with_fresh_vars(self.span, infer::FnCall, value)
+    self.fcx.instantiate_binder_with_fresh_vars(self.span, infer::FnCall, value)
 }
 }

@@ -401,7 +401,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // with bound regions.
 let fn_sig = tcx.fn_sig(def_id).subst(self.tcx, substs);
 let fn_sig =
-    self.replace_bound_vars_with_fresh_vars(obligation.cause.span, infer::FnCall, fn_sig);
+    self.instantiate_binder_with_fresh_vars(obligation.cause.span, infer::FnCall, fn_sig);
 
 let InferOk { value, obligations: o } =
     self.at(&obligation.cause, self.param_env).normalize(fn_sig);

@@ -924,7 +924,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
 ty::AssocKind::Fn => self.probe(|_| {
     let substs = self.fresh_substs_for_item(self.span, method.def_id);
     let fty = self.tcx.fn_sig(method.def_id).subst(self.tcx, substs);
-    let fty = self.replace_bound_vars_with_fresh_vars(self.span, infer::FnCall, fty);
+    let fty = self.instantiate_binder_with_fresh_vars(self.span, infer::FnCall, fty);
 
     if let Some(self_ty) = self_ty {
         if self

@@ -129,7 +129,7 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> {
 let a_types = infcx.tcx.anonymize_bound_vars(a_types);
 let b_types = infcx.tcx.anonymize_bound_vars(b_types);
 if a_types.bound_vars() == b_types.bound_vars() {
-    let (a_types, b_types) = infcx.replace_bound_vars_with_placeholders(
+    let (a_types, b_types) = infcx.instantiate_binder_with_placeholders(
         a_types.map_bound(|a_types| (a_types, b_types.skip_binder())),
     );
     for (a, b) in std::iter::zip(a_types, b_types) {

@@ -38,13 +38,13 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
 // First, we instantiate each bound region in the supertype with a
 // fresh placeholder region. Note that this automatically creates
 // a new universe if needed.
-let sup_prime = self.infcx.replace_bound_vars_with_placeholders(sup);
+let sup_prime = self.infcx.instantiate_binder_with_placeholders(sup);
 
 // Next, we instantiate each bound region in the subtype
 // with a fresh region variable. These region variables --
 // but no other pre-existing region variables -- can name
 // the placeholders.
-let sub_prime = self.infcx.replace_bound_vars_with_fresh_vars(span, HigherRankedType, sub);
+let sub_prime = self.infcx.instantiate_binder_with_fresh_vars(span, HigherRankedType, sub);
 
 debug!("a_prime={:?}", sub_prime);
 debug!("b_prime={:?}", sup_prime);

@@ -70,7 +70,7 @@ impl<'tcx> InferCtxt<'tcx> {
 ///
 /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html
 #[instrument(level = "debug", skip(self), ret)]
-pub fn replace_bound_vars_with_placeholders<T>(&self, binder: ty::Binder<'tcx, T>) -> T
+pub fn instantiate_binder_with_placeholders<T>(&self, binder: ty::Binder<'tcx, T>) -> T
 where
     T: TypeFoldable<'tcx> + Copy,
 {

@@ -995,7 +995,7 @@ impl<'tcx> InferCtxt<'tcx> {
 
 Ok(self.commit_if_ok(|_snapshot| {
     let ty::SubtypePredicate { a_is_expected, a, b } =
-        self.replace_bound_vars_with_placeholders(predicate);
+        self.instantiate_binder_with_placeholders(predicate);
 
     let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?;
 

@@ -1008,7 +1008,7 @@ impl<'tcx> InferCtxt<'tcx> {
 cause: &traits::ObligationCause<'tcx>,
 predicate: ty::PolyRegionOutlivesPredicate<'tcx>,
 ) {
-let ty::OutlivesPredicate(r_a, r_b) = self.replace_bound_vars_with_placeholders(predicate);
+let ty::OutlivesPredicate(r_a, r_b) = self.instantiate_binder_with_placeholders(predicate);
 let origin =
     SubregionOrigin::from_obligation_cause(cause, || RelateRegionParamBound(cause.span));
 self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b`

@@ -1447,7 +1447,14 @@ impl<'tcx> InferCtxt<'tcx> {
 value
 }
 
-pub fn replace_bound_vars_with_fresh_vars<T>(
+// Instantiates the bound variables in a given binder with fresh inference
+// variables in the current universe.
+//
+// Use this method if you'd like to find some substitution of the binder's
+// variables (e.g. during a method call). If there isn't a [`LateBoundRegionConversionTime`]
+// that corresponds to your use case, consider whether or not you should
+// use [`InferCtxt::instantiate_binder_with_placeholders`] instead.
+pub fn instantiate_binder_with_fresh_vars<T>(
     &self,
     span: Span,
     lbrct: LateBoundRegionConversionTime,

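A hedged sketch of how a caller chooses between the two renamed entry points described in the comment above (not code from this commit; `infcx`, `span`, and the binder values are assumed to be in scope):

    // "Pick some instantiation" of the binder, e.g. when type-checking a call site:
    let sig = infcx.instantiate_binder_with_fresh_vars(
        span,
        infer::LateBoundRegionConversionTime::FnCall,
        poly_fn_sig,
    );

    // "Must hold for every instantiation", e.g. when proving a `for<'a>` obligation:
    let pred = infcx.instantiate_binder_with_placeholders(poly_predicate);
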
@@ -161,7 +161,7 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> {
 let a_types = infcx.tcx.anonymize_bound_vars(a_types);
 let b_types = infcx.tcx.anonymize_bound_vars(b_types);
 if a_types.bound_vars() == b_types.bound_vars() {
-    let (a_types, b_types) = infcx.replace_bound_vars_with_placeholders(
+    let (a_types, b_types) = infcx.instantiate_binder_with_placeholders(
         a_types.map_bound(|a_types| (a_types, b_types.skip_binder())),
     );
     for (a, b) in std::iter::zip(a_types, b_types) {

@@ -54,14 +54,14 @@ rustc_queries! {
 /// This is because the `hir_crate` query gives you access to all other items.
 /// To avoid this fate, do not call `tcx.hir().krate()`; instead,
 /// prefer wrappers like `tcx.visit_all_items_in_krate()`.
-query hir_crate(key: ()) -> Crate<'tcx> {
+query hir_crate(key: ()) -> &'tcx Crate<'tcx> {
     arena_cache
     eval_always
     desc { "getting the crate HIR" }
 }
 
 /// All items in the crate.
-query hir_crate_items(_: ()) -> rustc_middle::hir::ModuleItems {
+query hir_crate_items(_: ()) -> &'tcx rustc_middle::hir::ModuleItems {
     arena_cache
     eval_always
     desc { "getting HIR crate items" }

@@ -71,7 +71,7 @@ rustc_queries! {
 ///
 /// This can be conveniently accessed by `tcx.hir().visit_item_likes_in_module`.
 /// Avoid calling this query directly.
-query hir_module_items(key: LocalDefId) -> rustc_middle::hir::ModuleItems {
+query hir_module_items(key: LocalDefId) -> &'tcx rustc_middle::hir::ModuleItems {
     arena_cache
     desc { |tcx| "getting HIR module items in `{}`", tcx.def_path_str(key.to_def_id()) }
     cache_on_disk_if { true }

@@ -183,7 +183,7 @@ rustc_queries! {
     separate_provide_extern
 }
 
-query unsizing_params_for_adt(key: DefId) -> rustc_index::bit_set::BitSet<u32>
+query unsizing_params_for_adt(key: DefId) -> &'tcx rustc_index::bit_set::BitSet<u32>
 {
     arena_cache
     desc { |tcx|

@@ -218,7 +218,7 @@ rustc_queries! {
 
 /// Maps from the `DefId` of an item (trait/struct/enum/fn) to its
 /// associated generics.
-query generics_of(key: DefId) -> ty::Generics {
+query generics_of(key: DefId) -> &'tcx ty::Generics {
     desc { |tcx| "computing generics of `{}`", tcx.def_path_str(key) }
     arena_cache
     cache_on_disk_if { key.is_local() }

@@ -295,19 +295,19 @@ rustc_queries! {
 /// These are assembled from the following places:
 /// - `extern` blocks (depending on their `link` attributes)
 /// - the `libs` (`-l`) option
-query native_libraries(_: CrateNum) -> Vec<NativeLib> {
+query native_libraries(_: CrateNum) -> &'tcx Vec<NativeLib> {
     arena_cache
     desc { "looking up the native libraries of a linked crate" }
     separate_provide_extern
 }
 
-query shallow_lint_levels_on(key: hir::OwnerId) -> rustc_middle::lint::ShallowLintLevelMap {
+query shallow_lint_levels_on(key: hir::OwnerId) -> &'tcx rustc_middle::lint::ShallowLintLevelMap {
     eval_always // fetches `resolutions`
     arena_cache
     desc { |tcx| "looking up lint levels for `{}`", tcx.def_path_str(key.to_def_id()) }
 }
 
-query lint_expectations(_: ()) -> Vec<(LintExpectationId, LintExpectation)> {
+query lint_expectations(_: ()) -> &'tcx Vec<(LintExpectationId, LintExpectation)> {
     arena_cache
     desc { "computing `#[expect]`ed lints in this crate" }
 }

@@ -347,7 +347,7 @@ rustc_queries! {
 }
 
 /// Set of param indexes for type params that are in the type's representation
-query params_in_repr(key: DefId) -> rustc_index::bit_set::BitSet<u32> {
+query params_in_repr(key: DefId) -> &'tcx rustc_index::bit_set::BitSet<u32> {
     desc { "finding type parameters in the representation" }
     arena_cache
     no_hash

@@ -364,14 +364,14 @@ rustc_queries! {
 }
 
 /// Create a THIR tree for debugging.
-query thir_tree(key: ty::WithOptConstParam<LocalDefId>) -> String {
+query thir_tree(key: ty::WithOptConstParam<LocalDefId>) -> &'tcx String {
     no_hash
     arena_cache
     desc { |tcx| "constructing THIR tree for `{}`", tcx.def_path_str(key.did.to_def_id()) }
 }
 
 /// Create a list-like THIR representation for debugging.
-query thir_flat(key: ty::WithOptConstParam<LocalDefId>) -> String {
+query thir_flat(key: ty::WithOptConstParam<LocalDefId>) -> &'tcx String {
     no_hash
     arena_cache
     desc { |tcx| "constructing flat THIR representation for `{}`", tcx.def_path_str(key.did.to_def_id()) }

@@ -380,7 +380,7 @@ rustc_queries! {
 /// Set of all the `DefId`s in this crate that have MIR associated with
 /// them. This includes all the body owners, but also things like struct
 /// constructors.
-query mir_keys(_: ()) -> rustc_data_structures::fx::FxIndexSet<LocalDefId> {
+query mir_keys(_: ()) -> &'tcx rustc_data_structures::fx::FxIndexSet<LocalDefId> {
     arena_cache
     desc { "getting a list of all mir_keys" }
 }

@@ -478,7 +478,7 @@ rustc_queries! {
 
 query symbols_for_closure_captures(
     key: (LocalDefId, LocalDefId)
-) -> Vec<rustc_span::Symbol> {
+) -> &'tcx Vec<rustc_span::Symbol> {
     arena_cache
     desc {
         |tcx| "finding symbols for captures of closure `{}` in `{}`",

@@ -487,7 +487,7 @@ rustc_queries! {
     }
 }
 
-query mir_generator_witnesses(key: DefId) -> mir::GeneratorLayout<'tcx> {
+query mir_generator_witnesses(key: DefId) -> &'tcx mir::GeneratorLayout<'tcx> {
     arena_cache
     desc { |tcx| "generator witness types for `{}`", tcx.def_path_str(key) }
     cache_on_disk_if { key.is_local() }

@@ -508,14 +508,14 @@ rustc_queries! {
 
 /// Returns coverage summary info for a function, after executing the `InstrumentCoverage`
 /// MIR pass (assuming the -Cinstrument-coverage option is enabled).
-query coverageinfo(key: ty::InstanceDef<'tcx>) -> mir::CoverageInfo {
+query coverageinfo(key: ty::InstanceDef<'tcx>) -> &'tcx mir::CoverageInfo {
     desc { |tcx| "retrieving coverage info from MIR for `{}`", tcx.def_path_str(key.def_id()) }
     arena_cache
 }
 
 /// Returns the `CodeRegions` for a function that has instrumented coverage, in case the
 /// function was optimized out before codegen, and before being added to the Coverage Map.
-query covered_code_regions(key: DefId) -> Vec<&'tcx mir::coverage::CodeRegion> {
+query covered_code_regions(key: DefId) -> &'tcx Vec<&'tcx mir::coverage::CodeRegion> {
     desc {
         |tcx| "retrieving the covered `CodeRegion`s, if instrumented, for `{}`",
         tcx.def_path_str(key)

@@ -557,7 +557,7 @@ rustc_queries! {
     desc { "erasing regions from `{}`", ty }
 }
 
-query wasm_import_module_map(_: CrateNum) -> FxHashMap<DefId, String> {
+query wasm_import_module_map(_: CrateNum) -> &'tcx FxHashMap<DefId, String> {
     arena_cache
     desc { "getting wasm import module map" }
 }

@@ -632,7 +632,7 @@ rustc_queries! {
     desc { |tcx| "computing the bounds for type parameter `{}`", tcx.hir().ty_param_name(key.1) }
 }
 
-query trait_def(key: DefId) -> ty::TraitDef {
+query trait_def(key: DefId) -> &'tcx ty::TraitDef {
     desc { |tcx| "computing trait definition for `{}`", tcx.def_path_str(key) }
     arena_cache
     cache_on_disk_if { key.is_local() }

@@ -703,7 +703,7 @@ rustc_queries! {
 }
 
 /// Gets a map with the variance of every item; use `item_variance` instead.
-query crate_variances(_: ()) -> ty::CrateVariancesMap<'tcx> {
+query crate_variances(_: ()) -> &'tcx ty::CrateVariancesMap<'tcx> {
     arena_cache
     desc { "computing the variances for items in this crate" }
 }

@@ -716,7 +716,7 @@ rustc_queries! {
 }
 
 /// Maps from thee `DefId` of a type to its (inferred) outlives.
-query inferred_outlives_crate(_: ()) -> ty::CratePredicatesMap<'tcx> {
+query inferred_outlives_crate(_: ()) -> &'tcx ty::CratePredicatesMap<'tcx> {
     arena_cache
     desc { "computing the inferred outlives predicates for items in this crate" }
 }

@@ -729,7 +729,7 @@ rustc_queries! {
 }
 
 /// Maps from a trait item to the trait item "descriptor".
-query associated_item(key: DefId) -> ty::AssocItem {
+query associated_item(key: DefId) -> &'tcx ty::AssocItem {
     desc { |tcx| "computing associated item data for `{}`", tcx.def_path_str(key) }
     arena_cache
     cache_on_disk_if { key.is_local() }

@@ -737,7 +737,7 @@ rustc_queries! {
 }
 
 /// Collects the associated items defined on a trait or impl.
-query associated_items(key: DefId) -> ty::AssocItems<'tcx> {
+query associated_items(key: DefId) -> &'tcx ty::AssocItems<'tcx> {
     arena_cache
     desc { |tcx| "collecting associated items of `{}`", tcx.def_path_str(key) }
 }

@@ -763,7 +763,7 @@ rustc_queries! {
 ///
 /// The map returned for `tcx.impl_item_implementor_ids(impl_id)` would be
 ///`{ trait_f: impl_f, trait_g: impl_g }`
-query impl_item_implementor_ids(impl_id: DefId) -> FxHashMap<DefId, DefId> {
+query impl_item_implementor_ids(impl_id: DefId) -> &'tcx FxHashMap<DefId, DefId> {
     arena_cache
     desc { |tcx| "comparing impl items against trait for `{}`", tcx.def_path_str(impl_id) }
 }

@@ -884,7 +884,7 @@ rustc_queries! {
 ///
 /// The second return value maps from ADTs to ignored derived traits (e.g. Debug and Clone) and
 /// their respective impl (i.e., part of the derive macro)
-query live_symbols_and_ignored_derived_traits(_: ()) -> (
+query live_symbols_and_ignored_derived_traits(_: ()) -> &'tcx (
     FxHashSet<LocalDefId>,
     FxHashMap<LocalDefId, Vec<(DefId, DefId)>>
 ) {

@@ -964,7 +964,7 @@ rustc_queries! {
 
 /// Gets a complete map from all types to their inherent impls.
 /// Not meant to be used directly outside of coherence.
-query crate_inherent_impls(k: ()) -> CrateInherentImpls {
+query crate_inherent_impls(k: ()) -> &'tcx CrateInherentImpls {
     arena_cache
     desc { "finding all inherent impls defined in crate" }
 }

@@ -1099,7 +1099,7 @@ rustc_queries! {
     desc { "checking for private elements in public interfaces" }
 }
 
-query reachable_set(_: ()) -> FxHashSet<LocalDefId> {
+query reachable_set(_: ()) -> &'tcx FxHashSet<LocalDefId> {
     arena_cache
     desc { "reachability" }
 }

@@ -1111,7 +1111,7 @@ rustc_queries! {
 }
 
 /// Generates a MIR body for the shim.
-query mir_shims(key: ty::InstanceDef<'tcx>) -> mir::Body<'tcx> {
+query mir_shims(key: ty::InstanceDef<'tcx>) -> &'tcx mir::Body<'tcx> {
     arena_cache
     desc { |tcx| "generating MIR shim for `{}`", tcx.def_path_str(key.def_id()) }
 }

@@ -1191,7 +1191,7 @@ rustc_queries! {
     separate_provide_extern
 }
 
-query codegen_fn_attrs(def_id: DefId) -> CodegenFnAttrs {
+query codegen_fn_attrs(def_id: DefId) -> &'tcx CodegenFnAttrs {
     desc { |tcx| "computing codegen attributes of `{}`", tcx.def_path_str(def_id) }
     arena_cache
     cache_on_disk_if { def_id.is_local() }

@@ -1209,7 +1209,7 @@ rustc_queries! {
 }
 /// Gets the rendered value of the specified constant or associated constant.
 /// Used by rustdoc.
-query rendered_const(def_id: DefId) -> String {
+query rendered_const(def_id: DefId) -> &'tcx String {
     arena_cache
     desc { |tcx| "rendering constant initializer of `{}`", tcx.def_path_str(def_id) }
     cache_on_disk_if { def_id.is_local() }

@@ -1268,12 +1268,12 @@ rustc_queries! {
 }
 
 /// Given a trait `trait_id`, return all known `impl` blocks.
-query trait_impls_of(trait_id: DefId) -> ty::trait_def::TraitImpls {
+query trait_impls_of(trait_id: DefId) -> &'tcx ty::trait_def::TraitImpls {
     arena_cache
     desc { |tcx| "finding trait impls of `{}`", tcx.def_path_str(trait_id) }
 }
 
-query specialization_graph_of(trait_id: DefId) -> specialization_graph::Graph {
+query specialization_graph_of(trait_id: DefId) -> &'tcx specialization_graph::Graph {
     arena_cache
     desc { |tcx| "building specialization graph of trait `{}`", tcx.def_path_str(trait_id) }
     cache_on_disk_if { true }

@@ -1403,7 +1403,7 @@ rustc_queries! {
     separate_provide_extern
 }
 
-query dependency_formats(_: ()) -> Lrc<crate::middle::dependency_format::Dependencies> {
+query dependency_formats(_: ()) -> &'tcx Lrc<crate::middle::dependency_format::Dependencies> {
     arena_cache
     desc { "getting the linkage format of all dependencies" }
 }

@@ -1503,7 +1503,7 @@ rustc_queries! {
 // Does not include external symbols that don't have a corresponding DefId,
 // like the compiler-generated `main` function and so on.
 query reachable_non_generics(_: CrateNum)
-    -> DefIdMap<SymbolExportInfo> {
+    -> &'tcx DefIdMap<SymbolExportInfo> {
     arena_cache
     desc { "looking up the exported symbols of a crate" }
     separate_provide_extern

@@ -1526,7 +1526,7 @@ rustc_queries! {
 /// added or removed in any upstream crate. Instead use the narrower
 /// `upstream_monomorphizations_for`, `upstream_drop_glue_for`, or, even
 /// better, `Instance::upstream_monomorphization()`.
-query upstream_monomorphizations(_: ()) -> DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
+query upstream_monomorphizations(_: ()) -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
     arena_cache
     desc { "collecting available upstream monomorphizations" }
 }

@@ -1568,7 +1568,7 @@ rustc_queries! {
 }
 
 /// Returns a list of all `extern` blocks of a crate.
-query foreign_modules(_: CrateNum) -> FxHashMap<DefId, ForeignModule> {
+query foreign_modules(_: CrateNum) -> &'tcx FxHashMap<DefId, ForeignModule> {
     arena_cache
     desc { "looking up the foreign modules of a linked crate" }
     separate_provide_extern

@@ -1602,7 +1602,7 @@ rustc_queries! {
 
 /// Gets the extra data to put in each output filename for a crate.
 /// For example, compiling the `foo` crate with `extra-filename=-a` creates a `libfoo-b.rlib` file.
-query extra_filename(_: CrateNum) -> String {
+query extra_filename(_: CrateNum) -> &'tcx String {
     arena_cache
     eval_always
     desc { "looking up the extra filename for a crate" }

@@ -1610,7 +1610,7 @@ rustc_queries! {
 }
 
 /// Gets the paths where the crate came from in the file system.
-query crate_extern_paths(_: CrateNum) -> Vec<PathBuf> {
+query crate_extern_paths(_: CrateNum) -> &'tcx Vec<PathBuf> {
     arena_cache
     eval_always
     desc { "looking up the paths for extern crates" }

@@ -1641,7 +1641,7 @@ rustc_queries! {
 /// Does lifetime resolution on items. Importantly, we can't resolve
 /// lifetimes directly on things like trait methods, because of trait params.
 /// See `rustc_resolve::late::lifetimes for details.
-query resolve_lifetimes(_: hir::OwnerId) -> ResolveLifetimes {
+query resolve_lifetimes(_: hir::OwnerId) -> &'tcx ResolveLifetimes {
     arena_cache
     desc { "resolving lifetimes" }
 }

@@ -1712,7 +1712,7 @@ rustc_queries! {
     desc { |tcx| "computing crate imported by `{}`", tcx.def_path_str(def_id.to_def_id()) }
 }
 
-query lib_features(_: ()) -> LibFeatures {
+query lib_features(_: ()) -> &'tcx LibFeatures {
     arena_cache
     desc { "calculating the lib features map" }
 }

@@ -1720,7 +1720,7 @@ rustc_queries! {
     desc { "calculating the lib features defined in a crate" }
     separate_provide_extern
 }
-query stability_implications(_: CrateNum) -> FxHashMap<Symbol, Symbol> {
+query stability_implications(_: CrateNum) -> &'tcx FxHashMap<Symbol, Symbol> {
     arena_cache
     desc { "calculating the implications between `#[unstable]` features defined in a crate" }
     separate_provide_extern

@@ -1731,14 +1731,14 @@ rustc_queries! {
     separate_provide_extern
 }
 /// Returns the lang items defined in another crate by loading it from metadata.
-query get_lang_items(_: ()) -> LanguageItems {
+query get_lang_items(_: ()) -> &'tcx LanguageItems {
     arena_cache
     eval_always
     desc { "calculating the lang items map" }
 }
 
 /// Returns all diagnostic items defined in all crates.
-query all_diagnostic_items(_: ()) -> rustc_hir::diagnostic_items::DiagnosticItems {
+query all_diagnostic_items(_: ()) -> &'tcx rustc_hir::diagnostic_items::DiagnosticItems {
     arena_cache
     eval_always
     desc { "calculating the diagnostic items map" }

@@ -1751,7 +1751,7 @@ rustc_queries! {
 }
 
 /// Returns the diagnostic items defined in a crate.
-query diagnostic_items(_: CrateNum) -> rustc_hir::diagnostic_items::DiagnosticItems {
+query diagnostic_items(_: CrateNum) -> &'tcx rustc_hir::diagnostic_items::DiagnosticItems {
     arena_cache
     desc { "calculating the diagnostic items map in a crate" }
     separate_provide_extern

@ -1761,11 +1761,11 @@ rustc_queries! {
|
||||||
desc { "calculating the missing lang items in a crate" }
|
desc { "calculating the missing lang items in a crate" }
|
||||||
separate_provide_extern
|
separate_provide_extern
|
||||||
}
|
}
|
||||||
query visible_parent_map(_: ()) -> DefIdMap<DefId> {
|
query visible_parent_map(_: ()) -> &'tcx DefIdMap<DefId> {
|
||||||
arena_cache
|
arena_cache
|
||||||
desc { "calculating the visible parent map" }
|
desc { "calculating the visible parent map" }
|
||||||
}
|
}
|
||||||
query trimmed_def_paths(_: ()) -> FxHashMap<DefId, Symbol> {
|
query trimmed_def_paths(_: ()) -> &'tcx FxHashMap<DefId, Symbol> {
|
||||||
arena_cache
|
arena_cache
|
||||||
desc { "calculating trimmed def paths" }
|
desc { "calculating trimmed def paths" }
|
||||||
}
|
}
|
||||||
|
@ -1774,14 +1774,14 @@ rustc_queries! {
|
||||||
desc { "seeing if we're missing an `extern crate` item for this crate" }
|
desc { "seeing if we're missing an `extern crate` item for this crate" }
|
||||||
separate_provide_extern
|
separate_provide_extern
|
||||||
}
|
}
|
||||||
query used_crate_source(_: CrateNum) -> Lrc<CrateSource> {
|
query used_crate_source(_: CrateNum) -> &'tcx Lrc<CrateSource> {
|
||||||
arena_cache
|
arena_cache
|
||||||
eval_always
|
eval_always
|
||||||
desc { "looking at the source for a crate" }
|
desc { "looking at the source for a crate" }
|
||||||
separate_provide_extern
|
separate_provide_extern
|
||||||
}
|
}
|
||||||
/// Returns the debugger visualizers defined for this crate.
|
/// Returns the debugger visualizers defined for this crate.
|
||||||
query debugger_visualizers(_: CrateNum) -> Vec<rustc_span::DebuggerVisualizerFile> {
|
query debugger_visualizers(_: CrateNum) -> &'tcx Vec<rustc_span::DebuggerVisualizerFile> {
|
||||||
arena_cache
|
arena_cache
|
||||||
desc { "looking up the debugger visualizers for this crate" }
|
desc { "looking up the debugger visualizers for this crate" }
|
||||||
separate_provide_extern
|
separate_provide_extern
|
||||||
|
@ -1819,7 +1819,7 @@ rustc_queries! {
|
||||||
desc { |tcx| "finding names imported by glob use for `{}`", tcx.def_path_str(def_id.to_def_id()) }
|
desc { |tcx| "finding names imported by glob use for `{}`", tcx.def_path_str(def_id.to_def_id()) }
|
||||||
}
|
}
|
||||||
|
|
||||||
query stability_index(_: ()) -> stability::Index {
|
query stability_index(_: ()) -> &'tcx stability::Index {
|
||||||
arena_cache
|
arena_cache
|
||||||
eval_always
|
eval_always
|
||||||
desc { "calculating the stability index for the local crate" }
|
desc { "calculating the stability index for the local crate" }
|
||||||
|
@ -1883,7 +1883,7 @@ rustc_queries! {
|
||||||
///
|
///
|
||||||
/// This query returns an `&Arc` because codegen backends need the value even after the `TyCtxt`
|
/// This query returns an `&Arc` because codegen backends need the value even after the `TyCtxt`
|
||||||
/// has been destroyed.
|
/// has been destroyed.
|
||||||
query output_filenames(_: ()) -> Arc<OutputFilenames> {
|
query output_filenames(_: ()) -> &'tcx Arc<OutputFilenames> {
|
||||||
feedable
|
feedable
|
||||||
desc { "getting output filenames" }
|
desc { "getting output filenames" }
|
||||||
arena_cache
|
arena_cache
|
||||||
|
@ -2056,7 +2056,7 @@ rustc_queries! {
|
||||||
remap_env_constness
|
remap_env_constness
|
||||||
}
|
}
|
||||||
|
|
||||||
query supported_target_features(_: CrateNum) -> FxHashMap<String, Option<Symbol>> {
|
query supported_target_features(_: CrateNum) -> &'tcx FxHashMap<String, Option<Symbol>> {
|
||||||
arena_cache
|
arena_cache
|
||||||
eval_always
|
eval_always
|
||||||
desc { "looking up supported target features" }
|
desc { "looking up supported target features" }
|
||||||
|
@ -2115,23 +2115,24 @@ rustc_queries! {
|
||||||
/// span) for an *existing* error. Therefore, it is best-effort, and may never handle
|
/// span) for an *existing* error. Therefore, it is best-effort, and may never handle
|
||||||
/// all of the cases that the normal `ty::Ty`-based wfcheck does. This is fine,
|
/// all of the cases that the normal `ty::Ty`-based wfcheck does. This is fine,
|
||||||
/// because the `ty::Ty`-based wfcheck is always run.
|
/// because the `ty::Ty`-based wfcheck is always run.
|
||||||
query diagnostic_hir_wf_check(key: (ty::Predicate<'tcx>, traits::WellFormedLoc)) -> Option<traits::ObligationCause<'tcx>> {
|
query diagnostic_hir_wf_check(
|
||||||
|
key: (ty::Predicate<'tcx>, traits::WellFormedLoc)
|
||||||
|
) -> &'tcx Option<traits::ObligationCause<'tcx>> {
|
||||||
arena_cache
|
arena_cache
|
||||||
eval_always
|
eval_always
|
||||||
no_hash
|
no_hash
|
||||||
desc { "performing HIR wf-checking for predicate `{:?}` at item `{:?}`", key.0, key.1 }
|
desc { "performing HIR wf-checking for predicate `{:?}` at item `{:?}`", key.0, key.1 }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/// The list of backend features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`,
|
/// The list of backend features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`,
|
||||||
/// `--target` and similar).
|
/// `--target` and similar).
|
||||||
query global_backend_features(_: ()) -> Vec<String> {
|
query global_backend_features(_: ()) -> &'tcx Vec<String> {
|
||||||
arena_cache
|
arena_cache
|
||||||
eval_always
|
eval_always
|
||||||
desc { "computing the backend features for CLI flags" }
|
desc { "computing the backend features for CLI flags" }
|
||||||
}
|
}
|
||||||
|
|
||||||
query generator_diagnostic_data(key: DefId) -> Option<GeneratorDiagnosticData<'tcx>> {
|
query generator_diagnostic_data(key: DefId) -> &'tcx Option<GeneratorDiagnosticData<'tcx>> {
|
||||||
arena_cache
|
arena_cache
|
||||||
desc { |tcx| "looking up generator diagnostic data of `{}`", tcx.def_path_str(key) }
|
desc { |tcx| "looking up generator diagnostic data of `{}`", tcx.def_path_str(key) }
|
||||||
separate_provide_extern
|
separate_provide_extern
|
||||||
|
|
|
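The common thread in the hunk above is that `arena_cache` queries now declare the `&'tcx` reference they actually hand back, instead of the owned type. As a rough, self-contained model of that caching scheme (the type and method names here are illustrative, not rustc's real ones):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Toy model: the provider computes an owned value once; the "arena" keeps it
// alive for the rest of the session and callers only ever see a shared reference.
struct ArenaCache {
    cache: RefCell<HashMap<u32, &'static Vec<String>>>,
}

impl ArenaCache {
    fn global_backend_features(&self, key: u32) -> &Vec<String> {
        if let Some(v) = self.cache.borrow().get(&key).copied() {
            return v;
        }
        // The provider still returns an owned Vec<String>; leaking stands in
        // for arena allocation tied to the compiler session's lifetime.
        let owned: &'static Vec<String> = Box::leak(Box::new(vec!["sse2".to_string()]));
        self.cache.borrow_mut().insert(key, owned);
        owned
    }
}

fn main() {
    let tcx = ArenaCache { cache: RefCell::new(HashMap::new()) };
    let a = tcx.global_backend_features(0);
    let b = tcx.global_backend_features(0);
    assert!(std::ptr::eq(a, b)); // the same cached value is returned both times
}
```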
@@ -112,15 +112,15 @@ macro_rules! query_helper_param_ty {
     ($K:ty) => { $K };
 }

-macro_rules! query_storage {
-    ([][$K:ty, $V:ty]) => {
-        <<$K as Key>::CacheSelector as CacheSelector<'tcx, $V>>::Cache
+macro_rules! query_if_arena {
+    ([] $arena:ty, $no_arena:ty) => {
+        $no_arena
     };
-    ([(arena_cache) $($rest:tt)*][$K:ty, $V:ty]) => {
-        <<$K as Key>::CacheSelector as CacheSelector<'tcx, $V>>::ArenaCache
+    ([(arena_cache) $($rest:tt)*] $arena:ty, $no_arena:ty) => {
+        $arena
     };
-    ([$other:tt $($modifiers:tt)*][$($args:tt)*]) => {
-        query_storage!([$($modifiers)*][$($args)*])
+    ([$other:tt $($modifiers:tt)*]$($args:tt)*) => {
+        query_if_arena!([$($modifiers)*]$($args)*)
     };
 }

@@ -184,23 +184,30 @@ macro_rules! define_callbacks {

         $(pub type $name<'tcx> = $($K)*;)*
     }
-    #[allow(nonstandard_style, unused_lifetimes)]
+    #[allow(nonstandard_style, unused_lifetimes, unused_parens)]
     pub mod query_values {
         use super::*;

-        $(pub type $name<'tcx> = $V;)*
+        $(pub type $name<'tcx> = query_if_arena!([$($modifiers)*] <$V as Deref>::Target, $V);)*
     }
-    #[allow(nonstandard_style, unused_lifetimes)]
+    #[allow(nonstandard_style, unused_lifetimes, unused_parens)]
     pub mod query_storage {
         use super::*;

-        $(pub type $name<'tcx> = query_storage!([$($modifiers)*][$($K)*, $V]);)*
+        $(
+            pub type $name<'tcx> = query_if_arena!([$($modifiers)*]
+                <<$($K)* as Key>::CacheSelector
+                    as CacheSelector<'tcx, <$V as Deref>::Target>>::ArenaCache,
+                <<$($K)* as Key>::CacheSelector as CacheSelector<'tcx, $V>>::Cache
+            );
+        )*
     }

     #[allow(nonstandard_style, unused_lifetimes)]
     pub mod query_stored {
         use super::*;

-        $(pub type $name<'tcx> = <query_storage::$name<'tcx> as QueryStorage>::Stored;)*
+        $(pub type $name<'tcx> = $V;)*
     }

     #[derive(Default)]
@@ -226,7 +233,7 @@ macro_rules! define_callbacks {
         $($(#[$attr])*
         #[inline(always)]
         #[must_use]
-        pub fn $name(self, key: query_helper_param_ty!($($K)*)) -> query_stored::$name<'tcx>
+        pub fn $name(self, key: query_helper_param_ty!($($K)*)) -> $V
         {
             self.at(DUMMY_SP).$name(key)
         })*
@@ -235,7 +242,7 @@ macro_rules! define_callbacks {
     impl<'tcx> TyCtxtAt<'tcx> {
         $($(#[$attr])*
         #[inline(always)]
-        pub fn $name(self, key: query_helper_param_ty!($($K)*)) -> query_stored::$name<'tcx>
+        pub fn $name(self, key: query_helper_param_ty!($($K)*)) -> $V
         {
             let key = key.into_query_param();
             opt_remap_env_constness!([$($modifiers)*][key]);
@@ -306,7 +313,7 @@ macro_rules! define_callbacks {
             span: Span,
             key: query_keys::$name<'tcx>,
             mode: QueryMode,
-        ) -> Option<query_stored::$name<'tcx>>;)*
+        ) -> Option<$V>;)*
     }
     };
 }
@@ -328,7 +335,7 @@ macro_rules! define_feedable {
     $(impl<'tcx, K: IntoQueryParam<$($K)*> + Copy> TyCtxtFeed<'tcx, K> {
         $(#[$attr])*
         #[inline(always)]
-        pub fn $name(self, value: $V) -> query_stored::$name<'tcx> {
+        pub fn $name(self, value: query_values::$name<'tcx>) -> $V {
             let key = self.key().into_query_param();
             opt_remap_env_constness!([$($modifiers)*][key]);
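A minimal, self-contained sketch of the dispatch pattern `query_if_arena!` uses: scan a bracketed modifier list and expand to one of two alternatives depending on whether a given modifier is present. The macro and type names below are invented for the example.

```rust
// Invented example macro in the same style: expand to the first type if the
// `(arena_cache)` modifier appears in the bracketed list, else to the second.
macro_rules! if_arena {
    ([] $arena:ty, $no_arena:ty) => { $no_arena };
    ([(arena_cache) $($rest:tt)*] $arena:ty, $no_arena:ty) => { $arena };
    ([$other:tt $($modifiers:tt)*] $($args:tt)*) => { if_arena!([$($modifiers)*] $($args)*) };
}

// The modifier list is scanned left to right until `(arena_cache)` is found
// (second alias) or the list is exhausted (first alias).
type Plain = if_arena!([(eval_always)] &'static str, String);
type Cached = if_arena!([(eval_always) (arena_cache)] &'static str, String);

fn main() {
    let p: Plain = String::from("owned");
    let c: Cached = "borrowed from an arena";
    println!("{p} / {c}");
}
```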
@@ -1,5 +1,5 @@
 use crate::elaborate_drops::DropFlagState;
-use rustc_middle::mir::{self, Body, Location};
+use rustc_middle::mir::{self, Body, Location, Terminator, TerminatorKind};
 use rustc_middle::ty::{self, TyCtxt};
 use rustc_target::abi::VariantIdx;

@@ -194,6 +194,17 @@ pub fn drop_flag_effects_for_location<'tcx, F>(
         on_all_children_bits(tcx, body, move_data, path, |mpi| callback(mpi, DropFlagState::Absent))
     }

+    // Drop does not count as a move but we should still consider the variable uninitialized.
+    if let Some(Terminator { kind: TerminatorKind::Drop { place, .. }, .. }) =
+        body.stmt_at(loc).right()
+    {
+        if let LookupResult::Exact(mpi) = move_data.rev_lookup.find(place.as_ref()) {
+            on_all_children_bits(tcx, body, move_data, mpi, |mpi| {
+                callback(mpi, DropFlagState::Absent)
+            })
+        }
+    }
+
     debug!("drop_flag_effects: assignment for location({:?})", loc);

     for_location_inits(tcx, body, move_data, loc, |mpi| callback(mpi, DropFlagState::Present));
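The inserted block above marks the dropped place (and its children) as uninitialized right after a `Drop` terminator. Plain Rust shows the same fact at the language level: once a value has been dropped, its destructor does not run again at the end of scope.

```rust
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let x = Noisy("x");
    drop(x); // the Drop terminator for `x` runs here
    // `x` is uninitialized from this point on, so there is no second drop
    // when `main` returns; that is what DropFlagState::Absent records.
    println!("end of main");
}
```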
@@ -376,7 +376,8 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
             | TerminatorKind::Resume
             | TerminatorKind::Abort
             | TerminatorKind::GeneratorDrop
-            | TerminatorKind::Unreachable => {}
+            | TerminatorKind::Unreachable
+            | TerminatorKind::Drop { .. } => {}

             TerminatorKind::Assert { ref cond, .. } => {
                 self.gather_operand(cond);
@@ -391,10 +392,6 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
                 self.create_move_path(place);
                 self.gather_init(place.as_ref(), InitKind::Deep);
             }

-            TerminatorKind::Drop { place, target: _, unwind: _ } => {
-                self.gather_move(place);
-            }
             TerminatorKind::DropAndReplace { place, ref value, .. } => {
                 self.create_move_path(place);
                 self.gather_operand(value);
@@ -223,13 +223,13 @@ pub trait ValueAnalysis<'tcx> {
         self.super_terminator(terminator, state)
     }

-    fn super_terminator(&self, terminator: &Terminator<'tcx>, _state: &mut State<Self::Value>) {
+    fn super_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
         match &terminator.kind {
             TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
                 // Effect is applied by `handle_call_return`.
             }
-            TerminatorKind::Drop { .. } => {
-                // We don't track dropped places.
+            TerminatorKind::Drop { place, .. } => {
+                state.flood_with(place.as_ref(), self.map(), Self::Value::bottom());
             }
             TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } => {
                 // They would have an effect, but are not allowed in this phase.
@@ -18,6 +18,35 @@ use rustc_span::Span;
 use rustc_target::abi::VariantIdx;
 use std::fmt;

+/// During MIR building, Drop and DropAndReplace terminators are inserted in every place where a drop may occur.
+/// However, in this phase, the presence of these terminators does not guarantee that a destructor will run,
+/// as the target of the drop may be uninitialized.
+/// In general, the compiler cannot determine at compile time whether a destructor will run or not.
+///
+/// At a high level, this pass refines Drop and DropAndReplace to only run the destructor if the
+/// target is initialized. The way this is achieved is by inserting drop flags for every variable
+/// that may be dropped, and then using those flags to determine whether a destructor should run.
+/// This pass also removes DropAndReplace, replacing it with a Drop paired with an assign statement.
+/// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
+/// "drop shim" for the type of the dropped place.
+///
+/// This pass relies on dropped places having an associated move path, which is then used to determine
+/// the initialization status of the place and its descendants.
+/// It's worth noting that a MIR containing a Drop without an associated move path is probably ill formed,
+/// as it would allow running a destructor on a place behind a reference:
+///
+/// ```text
+// fn drop_term<T>(t: &mut T) {
+//     mir!(
+//         {
+//             Drop(*t, exit)
+//         }
+//         exit = {
+//             Return()
+//         }
+//     )
+// }
+/// ```
 pub struct ElaborateDrops;

 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
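The new module docs above describe drop flags. A small runnable illustration of the situation they exist for: whether the destructor runs at the end of scope can depend on a runtime condition, so the compiler has to track initialization with a flag.

```rust
struct Guard(&'static str);

impl Drop for Guard {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn maybe_consume(flagged: bool) {
    let g = Guard("g");
    if flagged {
        drop(g); // moves `g`; its drop flag becomes "absent"
        println!("dropped early");
    }
    // Whether `g` still needs dropping here is only known at runtime,
    // which is why drop elaboration inserts a flag and a conditional drop.
}

fn main() {
    maybe_consume(true);
    maybe_consume(false);
}
```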
@@ -74,7 +74,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
                     )
                 }
                 ty::PredicateKind::Subtype(pred) => {
-                    let (a, b) = infcx.replace_bound_vars_with_placeholders(
+                    let (a, b) = infcx.instantiate_binder_with_placeholders(
                         goal.predicate.kind().rebind((pred.a, pred.b)),
                     );
                     let expected_found = ExpectedFound::new(true, a, b);
@@ -84,7 +84,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
                     )
                 }
                 ty::PredicateKind::Coerce(pred) => {
-                    let (a, b) = infcx.replace_bound_vars_with_placeholders(
+                    let (a, b) = infcx.instantiate_binder_with_placeholders(
                         goal.predicate.kind().rebind((pred.a, pred.b)),
                     );
                     let expected_found = ExpectedFound::new(false, a, b);
@@ -94,7 +94,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
                     )
                 }
                 ty::PredicateKind::ConstEquate(a, b) => {
-                    let (a, b) = infcx.replace_bound_vars_with_placeholders(
+                    let (a, b) = infcx.instantiate_binder_with_placeholders(
                         goal.predicate.kind().rebind((a, b)),
                     );
                     let expected_found = ExpectedFound::new(true, a, b);
@@ -26,7 +26,7 @@ pub(super) trait InferCtxtExt<'tcx> {
         rhs: T,
     ) -> Result<Vec<Goal<'tcx, ty::Predicate<'tcx>>>, NoSolution>;

-    fn instantiate_bound_vars_with_infer<T: TypeFoldable<'tcx> + Copy>(
+    fn instantiate_binder_with_infer<T: TypeFoldable<'tcx> + Copy>(
         &self,
         value: ty::Binder<'tcx, T>,
     ) -> T;
@@ -65,11 +65,11 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
         })
     }

-    fn instantiate_bound_vars_with_infer<T: TypeFoldable<'tcx> + Copy>(
+    fn instantiate_binder_with_infer<T: TypeFoldable<'tcx> + Copy>(
         &self,
         value: ty::Binder<'tcx, T>,
     ) -> T {
-        self.replace_bound_vars_with_fresh_vars(
+        self.instantiate_binder_with_fresh_vars(
             DUMMY_SP,
             LateBoundRegionConversionTime::HigherRankedType,
             value,
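For readers outside the compiler: the renamed helpers all take a `ty::Binder` (a value with `for<'a>`-style bound variables) and substitute something for those variables, i.e., they instantiate the binder. Surface Rust has no placeholders or inference variables to show, but the same idea is visible when a higher-ranked function pointer is called with a concrete lifetime; a plain-Rust sketch:

```rust
// `for<'a> fn(&'a u32) -> &'a u32` is a binder over the lifetime 'a.
fn apply(f: for<'a> fn(&'a u32) -> &'a u32) -> u32 {
    let x = 41;
    // Calling `f` instantiates the binder: 'a becomes the concrete
    // (anonymous) lifetime of the borrow of `x` for this call.
    *f(&x) + 1
}

fn identity(r: &u32) -> &u32 {
    r
}

fn main() {
    assert_eq!(apply(identity), 42);
}
```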
@@ -304,7 +304,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
                 }
             }
         } else {
-            let kind = self.infcx.replace_bound_vars_with_placeholders(kind);
+            let kind = self.infcx.instantiate_binder_with_placeholders(kind);
             let goal = goal.with(self.tcx(), ty::Binder::dummy(kind));
             let (_, certainty) = self.evaluate_goal(goal)?;
             self.make_canonical_response(certainty)
@@ -323,7 +323,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
         {
             ecx.infcx.probe(|_| {
                 let assumption_projection_pred =
-                    ecx.infcx.instantiate_bound_vars_with_infer(poly_projection_pred);
+                    ecx.infcx.instantiate_binder_with_infer(poly_projection_pred);
                 let nested_goals = ecx.infcx.eq(
                     goal.param_env,
                     goal.predicate.projection_ty,
@@ -72,7 +72,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
             // FIXME: Constness and polarity
             ecx.infcx.probe(|_| {
                 let assumption_trait_pred =
-                    ecx.infcx.instantiate_bound_vars_with_infer(poly_trait_pred);
+                    ecx.infcx.instantiate_binder_with_infer(poly_trait_pred);
                 let nested_goals = ecx.infcx.eq(
                     goal.param_env,
                     goal.predicate.trait_ref,
@@ -54,7 +54,7 @@ pub(super) fn instantiate_constituent_tys_for_auto_trait<'tcx>(
         }

         ty::GeneratorWitness(types) => {
-            Ok(infcx.replace_bound_vars_with_placeholders(types).to_vec())
+            Ok(infcx.instantiate_binder_with_placeholders(types).to_vec())
         }

         ty::GeneratorWitnessMIR(..) => todo!(),
@@ -174,7 +174,7 @@ pub(super) fn instantiate_constituent_tys_for_copy_clone_trait<'tcx>(
         }

         ty::GeneratorWitness(types) => {
-            Ok(infcx.replace_bound_vars_with_placeholders(types).to_vec())
+            Ok(infcx.instantiate_binder_with_placeholders(types).to_vec())
         }

         ty::GeneratorWitnessMIR(..) => todo!(),
@@ -22,7 +22,7 @@ pub fn recompute_applicable_impls<'tcx>(
     let impl_may_apply = |impl_def_id| {
         let ocx = ObligationCtxt::new_in_snapshot(infcx);
         let placeholder_obligation =
-            infcx.replace_bound_vars_with_placeholders(obligation.predicate);
+            infcx.instantiate_binder_with_placeholders(obligation.predicate);
         let obligation_trait_ref =
             ocx.normalize(&ObligationCause::dummy(), param_env, placeholder_obligation.trait_ref);

@@ -47,11 +47,11 @@ pub fn recompute_applicable_impls<'tcx>(
     let param_env_candidate_may_apply = |poly_trait_predicate: ty::PolyTraitPredicate<'tcx>| {
         let ocx = ObligationCtxt::new_in_snapshot(infcx);
         let placeholder_obligation =
-            infcx.replace_bound_vars_with_placeholders(obligation.predicate);
+            infcx.instantiate_binder_with_placeholders(obligation.predicate);
         let obligation_trait_ref =
             ocx.normalize(&ObligationCause::dummy(), param_env, placeholder_obligation.trait_ref);

-        let param_env_predicate = infcx.replace_bound_vars_with_fresh_vars(
+        let param_env_predicate = infcx.instantiate_binder_with_fresh_vars(
             DUMMY_SP,
             LateBoundRegionConversionTime::HigherRankedType,
             poly_trait_predicate,
@@ -1716,7 +1716,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
         let (values, err) = if let ty::PredicateKind::Clause(ty::Clause::Projection(data)) =
             bound_predicate.skip_binder()
         {
-            let data = self.replace_bound_vars_with_fresh_vars(
+            let data = self.instantiate_binder_with_fresh_vars(
                 obligation.cause.span,
                 infer::LateBoundRegionConversionTime::HigherRankedType,
                 bound_predicate.rebind(data),
@@ -898,7 +898,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             return false;
         }

-        let self_ty = self.replace_bound_vars_with_fresh_vars(
+        let self_ty = self.instantiate_binder_with_fresh_vars(
             DUMMY_SP,
             LateBoundRegionConversionTime::FnCall,
             trait_pred.self_ty(),
@@ -1191,7 +1191,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             }
         }) else { return None; };

-        let output = self.replace_bound_vars_with_fresh_vars(
+        let output = self.instantiate_binder_with_fresh_vars(
             DUMMY_SP,
             LateBoundRegionConversionTime::FnCall,
             output,
@@ -1200,7 +1200,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             .skip_binder()
             .iter()
             .map(|ty| {
-                self.replace_bound_vars_with_fresh_vars(
+                self.instantiate_binder_with_fresh_vars(
                     DUMMY_SP,
                     LateBoundRegionConversionTime::FnCall,
                     inputs.rebind(*ty),
@@ -3806,13 +3806,13 @@ fn hint_missing_borrow<'tcx>(
     err: &mut Diagnostic,
 ) {
     let found_args = match found.kind() {
-        ty::FnPtr(f) => infcx.replace_bound_vars_with_placeholders(*f).inputs().iter(),
+        ty::FnPtr(f) => infcx.instantiate_binder_with_placeholders(*f).inputs().iter(),
         kind => {
             span_bug!(span, "found was converted to a FnPtr above but is now {:?}", kind)
         }
     };
     let expected_args = match expected.kind() {
-        ty::FnPtr(f) => infcx.replace_bound_vars_with_placeholders(*f).inputs().iter(),
+        ty::FnPtr(f) => infcx.instantiate_binder_with_placeholders(*f).inputs().iter(),
         kind => {
             span_bug!(span, "expected was converted to a FnPtr above but is now {:?}", kind)
         }
@@ -321,7 +321,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
                 | ty::PredicateKind::ConstEvaluatable(..)
                 | ty::PredicateKind::ConstEquate(..) => {
                     let pred =
-                        ty::Binder::dummy(infcx.replace_bound_vars_with_placeholders(binder));
+                        ty::Binder::dummy(infcx.instantiate_binder_with_placeholders(binder));
                     ProcessResult::Changed(mk_pending(vec![obligation.with(infcx.tcx, pred)]))
                 }
                 ty::PredicateKind::Ambiguous => ProcessResult::Unchanged,
@@ -215,7 +215,7 @@ pub(super) fn poly_project_and_unify_type<'cx, 'tcx>(
     let r = infcx.commit_if_ok(|_snapshot| {
         let old_universe = infcx.universe();
         let placeholder_predicate =
-            infcx.replace_bound_vars_with_placeholders(obligation.predicate);
+            infcx.instantiate_binder_with_placeholders(obligation.predicate);
         let new_universe = infcx.universe();

         let placeholder_obligation = obligation.with(infcx.tcx, placeholder_predicate);
@@ -2046,7 +2046,7 @@ fn confirm_param_env_candidate<'cx, 'tcx>(
     let cause = &obligation.cause;
     let param_env = obligation.param_env;

-    let cache_entry = infcx.replace_bound_vars_with_fresh_vars(
+    let cache_entry = infcx.instantiate_binder_with_fresh_vars(
         cause.span,
         LateBoundRegionConversionTime::HigherRankedType,
         poly_cache_entry,
@@ -488,7 +488,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {

         let poly_trait_predicate = self.infcx.resolve_vars_if_possible(obligation.predicate);
         let placeholder_trait_predicate =
-            self.infcx.replace_bound_vars_with_placeholders(poly_trait_predicate);
+            self.infcx.instantiate_binder_with_placeholders(poly_trait_predicate);

         // Count only those upcast versions that match the trait-ref
         // we are looking for. Specifically, do not only check for the
@@ -151,7 +151,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {

         let trait_predicate = self.infcx.shallow_resolve(obligation.predicate);
         let placeholder_trait_predicate =
-            self.infcx.replace_bound_vars_with_placeholders(trait_predicate).trait_ref;
+            self.infcx.instantiate_binder_with_placeholders(trait_predicate).trait_ref;
         let placeholder_self_ty = placeholder_trait_predicate.self_ty();
         let placeholder_trait_predicate = ty::Binder::dummy(placeholder_trait_predicate);
         let (def_id, substs) = match *placeholder_self_ty.kind() {
@@ -336,7 +336,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         let cause = obligation.derived_cause(BuiltinDerivedObligation);

         let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
-        let trait_ref = self.infcx.replace_bound_vars_with_placeholders(poly_trait_ref);
+        let trait_ref = self.infcx.instantiate_binder_with_placeholders(poly_trait_ref);
         let trait_obligations: Vec<PredicateObligation<'_>> = self.impl_or_trait_obligations(
             &cause,
             obligation.recursion_depth + 1,
@@ -427,7 +427,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         let tcx = self.tcx();
         debug!(?obligation, ?index, "confirm_object_candidate");

-        let trait_predicate = self.infcx.replace_bound_vars_with_placeholders(obligation.predicate);
+        let trait_predicate = self.infcx.instantiate_binder_with_placeholders(obligation.predicate);
         let self_ty = self.infcx.shallow_resolve(trait_predicate.self_ty());
         let obligation_trait_ref = ty::Binder::dummy(trait_predicate.trait_ref);
         let ty::Dynamic(data, ..) = *self_ty.kind() else {
@@ -437,7 +437,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         let object_trait_ref = data.principal().unwrap_or_else(|| {
             span_bug!(obligation.cause.span, "object candidate with no principal")
         });
-        let object_trait_ref = self.infcx.replace_bound_vars_with_fresh_vars(
+        let object_trait_ref = self.infcx.instantiate_binder_with_fresh_vars(
             obligation.cause.span,
             HigherRankedType,
             object_trait_ref,
@@ -629,7 +629,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         }

         // Confirm the `type Output: Sized;` bound that is present on `FnOnce`
-        let output_ty = self.infcx.replace_bound_vars_with_placeholders(sig.output());
+        let output_ty = self.infcx.instantiate_binder_with_placeholders(sig.output());
         let output_ty = normalize_with_depth_to(
             self,
             obligation.param_env,
@@ -652,7 +652,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         debug!(?obligation, "confirm_trait_alias_candidate");

         let alias_def_id = obligation.predicate.def_id();
-        let predicate = self.infcx.replace_bound_vars_with_placeholders(obligation.predicate);
+        let predicate = self.infcx.instantiate_binder_with_placeholders(obligation.predicate);
         let trait_ref = predicate.trait_ref;
         let trait_def_id = trait_ref.def_id;
         let substs = trait_ref.substs;
@@ -1618,7 +1618,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
     ) -> smallvec::SmallVec<[(usize, ty::BoundConstness); 2]> {
         let poly_trait_predicate = self.infcx.resolve_vars_if_possible(obligation.predicate);
         let placeholder_trait_predicate =
-            self.infcx.replace_bound_vars_with_placeholders(poly_trait_predicate);
+            self.infcx.instantiate_binder_with_placeholders(poly_trait_predicate);
         debug!(?placeholder_trait_predicate);

         let tcx = self.infcx.tcx;
@@ -1738,7 +1738,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         potentially_unnormalized_candidates: bool,
     ) -> ProjectionMatchesProjection {
         let mut nested_obligations = Vec::new();
-        let infer_predicate = self.infcx.replace_bound_vars_with_fresh_vars(
+        let infer_predicate = self.infcx.instantiate_binder_with_fresh_vars(
             obligation.cause.span,
             LateBoundRegionConversionTime::HigherRankedType,
             env_predicate,
@@ -2339,7 +2339,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             .flat_map(|ty| {
                 let ty: ty::Binder<'tcx, Ty<'tcx>> = types.rebind(*ty); // <----/

-                let placeholder_ty = self.infcx.replace_bound_vars_with_placeholders(ty);
+                let placeholder_ty = self.infcx.instantiate_binder_with_placeholders(ty);
                 let Normalized { value: normalized_ty, mut obligations } =
                     ensure_sufficient_stack(|| {
                         project::normalize_with_depth(
@@ -2418,7 +2418,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         obligation: &TraitObligation<'tcx>,
     ) -> Result<Normalized<'tcx, SubstsRef<'tcx>>, ()> {
         let placeholder_obligation =
-            self.infcx.replace_bound_vars_with_placeholders(obligation.predicate);
+            self.infcx.instantiate_binder_with_placeholders(obligation.predicate);
         let placeholder_obligation_trait_ref = placeholder_obligation.trait_ref;

         let impl_substs = self.infcx.fresh_substs_for_item(obligation.cause.span, impl_def_id);
@@ -41,6 +41,28 @@ impl<'a, T> DormantMutRef<'a, T> {
         // SAFETY: our own safety conditions imply this reference is again unique.
         unsafe { &mut *self.ptr.as_ptr() }
     }

+    /// Borrows a new mutable reference from the unique borrow initially captured.
+    ///
+    /// # Safety
+    ///
+    /// The reborrow must have ended, i.e., the reference returned by `new` and
+    /// all pointers and references derived from it, must not be used anymore.
+    pub unsafe fn reborrow(&mut self) -> &'a mut T {
+        // SAFETY: our own safety conditions imply this reference is again unique.
+        unsafe { &mut *self.ptr.as_ptr() }
+    }
+
+    /// Borrows a new shared reference from the unique borrow initially captured.
+    ///
+    /// # Safety
+    ///
+    /// The reborrow must have ended, i.e., the reference returned by `new` and
+    /// all pointers and references derived from it, must not be used anymore.
+    pub unsafe fn reborrow_shared(&self) -> &'a T {
+        // SAFETY: our own safety conditions imply this reference is again unique.
+        unsafe { &*self.ptr.as_ptr() }
+    }
 }

 #[cfg(test)]
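The new `reborrow`/`reborrow_shared` methods extend the `DormantMutRef` pattern: stash a unique borrow as a raw pointer, hand out a temporary borrow, and later revive the original. A deliberately simplified toy of that pattern (not the real std type, and it relies on the same caller-side unsafety contract as the methods above):

```rust
use std::marker::PhantomData;
use std::ptr::NonNull;

// Toy stand-in for DormantMutRef: `new` splits one `&'a mut T` into a usable
// reborrow plus a dormant handle; `awaken` revives the original borrow once
// the caller promises the reborrow (and everything derived from it) is dead.
struct Dormant<'a, T> {
    ptr: NonNull<T>,
    _marker: PhantomData<&'a mut T>,
}

impl<'a, T> Dormant<'a, T> {
    fn new(r: &'a mut T) -> (&'a mut T, Self) {
        let ptr = NonNull::from(r);
        // SAFETY: the dormant handle is only used again after the returned
        // reborrow has ended (that is the caller's obligation in `awaken`).
        (unsafe { &mut *ptr.as_ptr() }, Dormant { ptr, _marker: PhantomData })
    }

    /// SAFETY: the reborrow returned by `new` must no longer be in use.
    unsafe fn awaken(self) -> &'a mut T {
        unsafe { &mut *self.ptr.as_ptr() }
    }
}

fn main() {
    let mut v = vec![1, 2, 3];
    let (borrow, dormant) = Dormant::new(&mut v);
    borrow.push(4); // use the reborrow, then let it go
    let v_again = unsafe { dormant.awaken() };
    assert_eq!(v_again.len(), 4);
}
```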
@@ -6,7 +6,7 @@ use core::hash::{Hash, Hasher};
 use core::iter::{FromIterator, FusedIterator};
 use core::marker::PhantomData;
 use core::mem::{self, ManuallyDrop};
-use core::ops::{Index, RangeBounds};
+use core::ops::{Bound, Index, RangeBounds};
 use core::ptr;

 use crate::alloc::{Allocator, Global};
@@ -15,7 +15,7 @@ use super::borrow::DormantMutRef;
 use super::dedup_sorted_iter::DedupSortedIter;
 use super::navigate::{LazyLeafRange, LeafRange};
 use super::node::{self, marker, ForceResult::*, Handle, NodeRef, Root};
-use super::search::SearchResult::*;
+use super::search::{SearchBound, SearchResult::*};
 use super::set_val::SetValZST;

 mod entry;
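`Bound` is pulled in here because the cursor API added below takes the same endpoint type that `BTreeMap::range` already accepts on stable; for reference:

```rust
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let map = BTreeMap::from([(1, "a"), (2, "b"), (3, "c")]);
    // A half-open range "strictly greater than 1", expressed with Bound:
    let tail: Vec<_> = map.range((Bound::Excluded(1), Bound::Unbounded)).collect();
    assert_eq!(tail, [(&2, &"b"), (&3, &"c")]);
}
```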
@@ -2422,6 +2422,732 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
     pub const fn is_empty(&self) -> bool {
         self.len() == 0
     }

+    /// Returns a [`Cursor`] pointing at the first element that is above the
+    /// given bound.
+    ///
+    /// If no such element exists then a cursor pointing at the "ghost"
+    /// non-element is returned.
+    ///
+    /// Passing [`Bound::Unbounded`] will return a cursor pointing at the first
+    /// element of the map.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(btree_cursors)]
+    ///
+    /// use std::collections::BTreeMap;
+    /// use std::ops::Bound;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// a.insert(3, "c");
+    /// a.insert(4, "c");
+    /// let cursor = a.lower_bound(Bound::Excluded(&2));
+    /// assert_eq!(cursor.key(), Some(&3));
+    /// ```
+    #[unstable(feature = "btree_cursors", issue = "107540")]
+    pub fn lower_bound<Q>(&self, bound: Bound<&Q>) -> Cursor<'_, K, V>
+    where
+        K: Borrow<Q> + Ord,
+        Q: Ord,
+    {
+        let root_node = match self.root.as_ref() {
+            None => return Cursor { current: None, root: None },
+            Some(root) => root.reborrow(),
+        };
+        let edge = root_node.lower_bound(SearchBound::from_range(bound));
+        Cursor { current: edge.next_kv().ok(), root: self.root.as_ref() }
+    }
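A compact usage sketch of the two seek entry points together (nightly-only, behind the `btree_cursors` feature declared above):

```rust
#![feature(btree_cursors)]

use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let map = BTreeMap::from([(1, "a"), (3, "c"), (5, "e")]);

    // First element strictly above 1:
    assert_eq!(map.lower_bound(Bound::Excluded(&1)).key(), Some(&3));
    // Last element strictly below 5:
    assert_eq!(map.upper_bound(Bound::Excluded(&5)).key(), Some(&3));
    // No element above 5, so the cursor sits on the "ghost" non-element:
    assert_eq!(map.lower_bound(Bound::Excluded(&5)).key(), None);
}
```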
|
|
||||||
|
/// Returns a [`CursorMut`] pointing at the first element that is above the
|
||||||
|
/// given bound.
|
||||||
|
///
|
||||||
|
/// If no such element exists then a cursor pointing at the "ghost"
|
||||||
|
/// non-element is returned.
|
||||||
|
///
|
||||||
|
/// Passing [`Bound::Unbounded`] will return a cursor pointing at the first
|
||||||
|
/// element of the map.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Basic usage:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// #![feature(btree_cursors)]
|
||||||
|
///
|
||||||
|
/// use std::collections::BTreeMap;
|
||||||
|
/// use std::ops::Bound;
|
||||||
|
///
|
||||||
|
/// let mut a = BTreeMap::new();
|
||||||
|
/// a.insert(1, "a");
|
||||||
|
/// a.insert(2, "b");
|
||||||
|
/// a.insert(3, "c");
|
||||||
|
/// a.insert(4, "c");
|
||||||
|
/// let cursor = a.lower_bound_mut(Bound::Excluded(&2));
|
||||||
|
/// assert_eq!(cursor.key(), Some(&3));
|
||||||
|
/// ```
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn lower_bound_mut<Q>(&mut self, bound: Bound<&Q>) -> CursorMut<'_, K, V, A>
|
||||||
|
where
|
||||||
|
K: Borrow<Q> + Ord,
|
||||||
|
Q: Ord,
|
||||||
|
{
|
||||||
|
let (root, dormant_root) = DormantMutRef::new(&mut self.root);
|
||||||
|
let root_node = match root.as_mut() {
|
||||||
|
None => {
|
||||||
|
return CursorMut {
|
||||||
|
current: None,
|
||||||
|
root: dormant_root,
|
||||||
|
length: &mut self.length,
|
||||||
|
alloc: &mut *self.alloc,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Some(root) => root.borrow_mut(),
|
||||||
|
};
|
||||||
|
let edge = root_node.lower_bound(SearchBound::from_range(bound));
|
||||||
|
CursorMut {
|
||||||
|
current: edge.next_kv().ok(),
|
||||||
|
root: dormant_root,
|
||||||
|
length: &mut self.length,
|
||||||
|
alloc: &mut *self.alloc,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a [`Cursor`] pointing at the last element that is below the
|
||||||
|
/// given bound.
|
||||||
|
///
|
||||||
|
/// If no such element exists then a cursor pointing at the "ghost"
|
||||||
|
/// non-element is returned.
|
||||||
|
///
|
||||||
|
/// Passing [`Bound::Unbounded`] will return a cursor pointing at the last
|
||||||
|
/// element of the map.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Basic usage:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// #![feature(btree_cursors)]
|
||||||
|
///
|
||||||
|
/// use std::collections::BTreeMap;
|
||||||
|
/// use std::ops::Bound;
|
||||||
|
///
|
||||||
|
/// let mut a = BTreeMap::new();
|
||||||
|
/// a.insert(1, "a");
|
||||||
|
/// a.insert(2, "b");
|
||||||
|
/// a.insert(3, "c");
|
||||||
|
/// a.insert(4, "c");
|
||||||
|
/// let cursor = a.upper_bound(Bound::Excluded(&3));
|
||||||
|
/// assert_eq!(cursor.key(), Some(&2));
|
||||||
|
/// ```
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn upper_bound<Q>(&self, bound: Bound<&Q>) -> Cursor<'_, K, V>
|
||||||
|
where
|
||||||
|
K: Borrow<Q> + Ord,
|
||||||
|
Q: Ord,
|
||||||
|
{
|
||||||
|
let root_node = match self.root.as_ref() {
|
||||||
|
None => return Cursor { current: None, root: None },
|
||||||
|
Some(root) => root.reborrow(),
|
||||||
|
};
|
||||||
|
let edge = root_node.upper_bound(SearchBound::from_range(bound));
|
||||||
|
Cursor { current: edge.next_back_kv().ok(), root: self.root.as_ref() }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a [`CursorMut`] pointing at the last element that is below the
|
||||||
|
/// given bound.
|
||||||
|
///
|
||||||
|
/// If no such element exists then a cursor pointing at the "ghost"
|
||||||
|
/// non-element is returned.
|
||||||
|
///
|
||||||
|
/// Passing [`Bound::Unbounded`] will return a cursor pointing at the last
|
||||||
|
/// element of the map.
|
||||||
|
///
|
||||||
|
/// # Examples
|
||||||
|
///
|
||||||
|
/// Basic usage:
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// #![feature(btree_cursors)]
|
||||||
|
///
|
||||||
|
/// use std::collections::BTreeMap;
|
||||||
|
/// use std::ops::Bound;
|
||||||
|
///
|
||||||
|
/// let mut a = BTreeMap::new();
|
||||||
|
/// a.insert(1, "a");
|
||||||
|
/// a.insert(2, "b");
|
||||||
|
/// a.insert(3, "c");
|
||||||
|
/// a.insert(4, "c");
|
||||||
|
/// let cursor = a.upper_bound_mut(Bound::Excluded(&3));
|
||||||
|
/// assert_eq!(cursor.key(), Some(&2));
|
||||||
|
/// ```
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn upper_bound_mut<Q>(&mut self, bound: Bound<&Q>) -> CursorMut<'_, K, V, A>
|
||||||
|
where
|
||||||
|
K: Borrow<Q> + Ord,
|
||||||
|
Q: Ord,
|
||||||
|
{
|
||||||
|
let (root, dormant_root) = DormantMutRef::new(&mut self.root);
|
||||||
|
let root_node = match root.as_mut() {
|
||||||
|
None => {
|
||||||
|
return CursorMut {
|
||||||
|
current: None,
|
||||||
|
root: dormant_root,
|
||||||
|
length: &mut self.length,
|
||||||
|
alloc: &mut *self.alloc,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Some(root) => root.borrow_mut(),
|
||||||
|
};
|
||||||
|
let edge = root_node.upper_bound(SearchBound::from_range(bound));
|
||||||
|
CursorMut {
|
||||||
|
current: edge.next_back_kv().ok(),
|
||||||
|
root: dormant_root,
|
||||||
|
length: &mut self.length,
|
||||||
|
alloc: &mut *self.alloc,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A cursor over a `BTreeMap`.
|
||||||
|
///
|
||||||
|
/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth.
|
||||||
|
///
|
||||||
|
/// Cursors always point to an element in the tree, and index in a logically circular way.
|
||||||
|
/// To accommodate this, there is a "ghost" non-element that yields `None` between the last and
|
||||||
|
/// first elements of the tree.
|
||||||
|
///
|
||||||
|
/// A `Cursor` is created with the [`BTreeMap::lower_bound`] and [`BTreeMap::upper_bound`] methods.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub struct Cursor<'a, K: 'a, V: 'a> {
|
||||||
|
current: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>, marker::KV>>,
|
||||||
|
root: Option<&'a node::Root<K, V>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
impl<K, V> Clone for Cursor<'_, K, V> {
|
||||||
|
fn clone(&self) -> Self {
|
||||||
|
let Cursor { current, root } = *self;
|
||||||
|
Cursor { current, root }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
impl<K: Debug, V: Debug> Debug for Cursor<'_, K, V> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.debug_tuple("Cursor").field(&self.key_value()).finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A cursor over a `BTreeMap` with editing operations.
|
||||||
|
///
|
||||||
|
/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth, and can
|
||||||
|
/// safely mutate the tree during iteration. This is because the lifetime of its yielded
|
||||||
|
/// references is tied to its own lifetime, instead of just the underlying tree. This means
|
||||||
|
/// cursors cannot yield multiple elements at once.
|
||||||
|
///
|
||||||
|
/// Cursors always point to an element in the tree, and index in a logically circular way.
|
||||||
|
/// To accommodate this, there is a "ghost" non-element that yields `None` between the last and
|
||||||
|
/// first elements of the tree.
|
||||||
|
///
|
||||||
|
/// A `Cursor` is created with the [`BTreeMap::lower_bound_mut`] and [`BTreeMap::upper_bound_mut`]
|
||||||
|
/// methods.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub struct CursorMut<
|
||||||
|
'a,
|
||||||
|
K: 'a,
|
||||||
|
V: 'a,
|
||||||
|
#[unstable(feature = "allocator_api", issue = "32838")] A = Global,
|
||||||
|
> {
|
||||||
|
current: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>>,
|
||||||
|
root: DormantMutRef<'a, Option<node::Root<K, V>>>,
|
||||||
|
length: &'a mut usize,
|
||||||
|
alloc: &'a mut A,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
impl<K: Debug, V: Debug, A> Debug for CursorMut<'_, K, V, A> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.debug_tuple("CursorMut").field(&self.key_value()).finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
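A sketch of what the mutable cursor just introduced buys over plain iteration: seek once, then edit values in place while walking forward (nightly-only, `btree_cursors`):

```rust
#![feature(btree_cursors)]

use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::from([(1, 10), (2, 20), (3, 30)]);

    // Start at the first key >= 2 and bump every value from there on.
    let mut cursor = map.lower_bound_mut(Bound::Included(&2));
    while let Some(v) = cursor.value_mut() {
        *v += 1;
        cursor.move_next();
    }

    assert_eq!(map, BTreeMap::from([(1, 10), (2, 21), (3, 31)]));
}
```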
|
|
||||||
|
impl<'a, K, V> Cursor<'a, K, V> {
|
||||||
|
/// Moves the cursor to the next element of the `BTreeMap`.
|
||||||
|
///
|
||||||
|
/// If the cursor is pointing to the "ghost" non-element then this will move it to
|
||||||
|
/// the first element of the `BTreeMap`. If it is pointing to the last
|
||||||
|
/// element of the `BTreeMap` then this will move it to the "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn move_next(&mut self) {
|
||||||
|
match self.current.take() {
|
||||||
|
None => {
|
||||||
|
self.current = self.root.and_then(|root| {
|
||||||
|
root.reborrow().first_leaf_edge().forget_node_type().right_kv().ok()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Some(current) => {
|
||||||
|
self.current = current.next_leaf_edge().next_kv().ok();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Moves the cursor to the previous element of the `BTreeMap`.
|
||||||
|
///
|
||||||
|
/// If the cursor is pointing to the "ghost" non-element then this will move it to
|
||||||
|
/// the last element of the `BTreeMap`. If it is pointing to the first
|
||||||
|
/// element of the `BTreeMap` then this will move it to the "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn move_prev(&mut self) {
|
||||||
|
match self.current.take() {
|
||||||
|
None => {
|
||||||
|
self.current = self.root.and_then(|root| {
|
||||||
|
root.reborrow().last_leaf_edge().forget_node_type().left_kv().ok()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Some(current) => {
|
||||||
|
self.current = current.next_back_leaf_edge().next_back_kv().ok();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the key of the element that the cursor is
|
||||||
|
/// currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn key(&self) -> Option<&'a K> {
|
||||||
|
self.current.as_ref().map(|current| current.into_kv().0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the value of the element that the cursor is
|
||||||
|
/// currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn value(&self) -> Option<&'a V> {
|
||||||
|
self.current.as_ref().map(|current| current.into_kv().1)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the key and value of the element that the cursor
|
||||||
|
/// is currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn key_value(&self) -> Option<(&'a K, &'a V)> {
|
||||||
|
self.current.as_ref().map(|current| current.into_kv())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the next element.
|
||||||
|
///
|
||||||
|
/// If the cursor is pointing to the "ghost" non-element then this returns
|
||||||
|
/// the first element of the `BTreeMap`. If it is pointing to the last
|
||||||
|
/// element of the `BTreeMap` then this returns `None`.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn peek_next(&self) -> Option<(&'a K, &'a V)> {
|
||||||
|
let mut next = self.clone();
|
||||||
|
next.move_next();
|
||||||
|
next.current.as_ref().map(|current| current.into_kv())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the previous element.
|
||||||
|
///
|
||||||
|
/// If the cursor is pointing to the "ghost" non-element then this returns
|
||||||
|
/// the last element of the `BTreeMap`. If it is pointing to the first
|
||||||
|
/// element of the `BTreeMap` then this returns `None`.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn peek_prev(&self) -> Option<(&'a K, &'a V)> {
|
||||||
|
let mut prev = self.clone();
|
||||||
|
prev.move_prev();
|
||||||
|
prev.current.as_ref().map(|current| current.into_kv())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a, K, V, A> CursorMut<'a, K, V, A> {
|
||||||
|
/// Moves the cursor to the next element of the `BTreeMap`.
|
||||||
|
///
|
||||||
|
/// If the cursor is pointing to the "ghost" non-element then this will move it to
|
||||||
|
/// the first element of the `BTreeMap`. If it is pointing to the last
|
||||||
|
/// element of the `BTreeMap` then this will move it to the "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn move_next(&mut self) {
|
||||||
|
match self.current.take() {
|
||||||
|
None => {
|
||||||
|
// SAFETY: The previous borrow of root has ended.
|
||||||
|
self.current = unsafe { self.root.reborrow() }.as_mut().and_then(|root| {
|
||||||
|
root.borrow_mut().first_leaf_edge().forget_node_type().right_kv().ok()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Some(current) => {
|
||||||
|
self.current = current.next_leaf_edge().next_kv().ok();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Moves the cursor to the previous element of the `BTreeMap`.
|
||||||
|
///
|
||||||
|
/// If the cursor is pointing to the "ghost" non-element then this will move it to
|
||||||
|
/// the last element of the `BTreeMap`. If it is pointing to the first
|
||||||
|
/// element of the `BTreeMap` then this will move it to the "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn move_prev(&mut self) {
|
||||||
|
match self.current.take() {
|
||||||
|
None => {
|
||||||
|
// SAFETY: The previous borrow of root has ended.
|
||||||
|
self.current = unsafe { self.root.reborrow() }.as_mut().and_then(|root| {
|
||||||
|
root.borrow_mut().last_leaf_edge().forget_node_type().left_kv().ok()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Some(current) => {
|
||||||
|
self.current = current.next_back_leaf_edge().next_back_kv().ok();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the key of the element that the cursor is
|
||||||
|
/// currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn key(&self) -> Option<&K> {
|
||||||
|
self.current.as_ref().map(|current| current.reborrow().into_kv().0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the value of the element that the cursor is
|
||||||
|
/// currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn value(&self) -> Option<&V> {
|
||||||
|
self.current.as_ref().map(|current| current.reborrow().into_kv().1)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the key and value of the element that the cursor
|
||||||
|
/// is currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn key_value(&self) -> Option<(&K, &V)> {
|
||||||
|
self.current.as_ref().map(|current| current.reborrow().into_kv())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a mutable reference to the value of the element that the cursor
|
||||||
|
/// is currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn value_mut(&mut self) -> Option<&mut V> {
|
||||||
|
self.current.as_mut().map(|current| current.kv_mut().1)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a reference to the key and mutable reference to the value of the
|
||||||
|
/// element that the cursor is currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub fn key_value_mut(&mut self) -> Option<(&K, &mut V)> {
|
||||||
|
self.current.as_mut().map(|current| {
|
||||||
|
let (k, v) = current.kv_mut();
|
||||||
|
(&*k, v)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a mutable reference to the key of the element that the cursor is
|
||||||
|
/// currently pointing to.
|
||||||
|
///
|
||||||
|
/// This returns `None` if the cursor is currently pointing to the
|
||||||
|
/// "ghost" non-element.
|
||||||
|
///
|
||||||
|
/// # Safety
|
||||||
|
///
|
||||||
|
/// This can be used to modify the key, but you must ensure that the
|
||||||
|
/// `BTreeMap` invariants are maintained. Specifically:
|
||||||
|
///
|
||||||
|
/// * The key must remain unique within the tree.
|
||||||
|
/// * The key must remain in sorted order with regards to other elements in
|
||||||
|
/// the tree.
|
||||||
|
#[unstable(feature = "btree_cursors", issue = "107540")]
|
||||||
|
pub unsafe fn key_mut_unchecked(&mut self) -> Option<&mut K> {
|
||||||
|
self.current.as_mut().map(|current| current.kv_mut().0)
|
||||||
|
}
|
||||||
|

    /// Returns a reference to the key and value of the next element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the first element of the `BTreeMap`. If it is pointing to the last
    /// element of the `BTreeMap` then this returns `None`.
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub fn peek_next(&mut self) -> Option<(&K, &mut V)> {
        let (k, v) = match self.current {
            None => {
                // SAFETY: The previous borrow of root has ended.
                unsafe { self.root.reborrow() }
                    .as_mut()?
                    .borrow_mut()
                    .first_leaf_edge()
                    .next_kv()
                    .ok()?
                    .into_kv_valmut()
            }
            // SAFETY: We're not using this to mutate the tree.
            Some(ref mut current) => {
                unsafe { current.reborrow_mut() }.next_leaf_edge().next_kv().ok()?.into_kv_valmut()
            }
        };
        Some((k, v))
    }
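
Together with `key` and `value_mut`, the two peek methods let a neighbour be inspected and edited without moving the cursor. A minimal usage sketch (an editor's illustration, not part of this diff), assuming a nightly toolchain with the `btree_cursors` feature and the `lower_bound_mut` constructor added elsewhere in this PR:

```
#![feature(btree_cursors)]
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
    // Position the cursor on the first key >= 2.
    let mut cur = map.lower_bound_mut(Bound::Included(&2));
    assert_eq!(cur.key(), Some(&2));
    // Inspect and edit the neighbours without moving the cursor.
    if let Some((_, v)) = cur.peek_next() {
        *v = 'C';
    }
    assert_eq!(cur.peek_prev(), Some((&1, &mut 'a')));
    assert_eq!(map[&3], 'C');
}
```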

    /// Returns a reference to the key and value of the previous element.
    ///
    /// If the cursor is pointing to the "ghost" non-element then this returns
    /// the last element of the `BTreeMap`. If it is pointing to the first
    /// element of the `BTreeMap` then this returns `None`.
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub fn peek_prev(&mut self) -> Option<(&K, &mut V)> {
        let (k, v) = match self.current.as_mut() {
            None => {
                // SAFETY: The previous borrow of root has ended.
                unsafe { self.root.reborrow() }
                    .as_mut()?
                    .borrow_mut()
                    .last_leaf_edge()
                    .next_back_kv()
                    .ok()?
                    .into_kv_valmut()
            }
            Some(current) => {
                // SAFETY: We're not using this to mutate the tree.
                unsafe { current.reborrow_mut() }
                    .next_back_leaf_edge()
                    .next_back_kv()
                    .ok()?
                    .into_kv_valmut()
            }
        };
        Some((k, v))
    }

    /// Returns a read-only cursor pointing to the current element.
    ///
    /// The lifetime of the returned `Cursor` is bound to that of the
    /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the
    /// `CursorMut` is frozen for the lifetime of the `Cursor`.
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub fn as_cursor(&self) -> Cursor<'_, K, V> {
        Cursor {
            // SAFETY: The tree is immutable while the cursor exists.
            root: unsafe { self.root.reborrow_shared().as_ref() },
            current: self.current.as_ref().map(|current| current.reborrow()),
        }
    }
}
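
The navigation and read accessors above combine into a simple in-place editing loop. Another hedged sketch under the same assumptions (nightly `btree_cursors`, `lower_bound_mut` from this PR); it is an illustration, not code from the diff:

```
#![feature(btree_cursors)]
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);
    // Walk the whole map from the front, upper-casing every value in place.
    let mut cur = map.lower_bound_mut(Bound::Unbounded);
    while let Some(v) = cur.value_mut() {
        *v = v.to_ascii_uppercase();
        cur.move_next();
    }
    assert_eq!(map, BTreeMap::from([(1, 'A'), (2, 'B'), (3, 'C')]));
}
```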

// Now the tree editing operations
impl<'a, K: Ord, V, A: Allocator + Clone> CursorMut<'a, K, V, A> {
    /// Inserts a new element into the `BTreeMap` after the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the front of the `BTreeMap`.
    ///
    /// # Safety
    ///
    /// You must ensure that the `BTreeMap` invariants are maintained.
    /// Specifically:
    ///
    /// * The key of the newly inserted element must be unique in the tree.
    /// * All keys in the tree must remain in sorted order.
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub unsafe fn insert_after_unchecked(&mut self, key: K, value: V) {
        let edge = match self.current.take() {
            None => {
                // SAFETY: We have no other reference to the tree.
                match unsafe { self.root.reborrow() } {
                    root @ None => {
                        // Tree is empty, allocate a new root.
                        let mut node = NodeRef::new_leaf(self.alloc.clone());
                        node.borrow_mut().push(key, value);
                        *root = Some(node.forget_type());
                        *self.length += 1;
                        return;
                    }
                    Some(root) => root.borrow_mut().first_leaf_edge(),
                }
            }
            Some(current) => current.next_leaf_edge(),
        };

        let handle = edge.insert_recursing(key, value, self.alloc.clone(), |ins| {
            drop(ins.left);
            // SAFETY: The handle to the newly inserted value is always on a
            // leaf node, so adding a new root node doesn't invalidate it.
            let root = unsafe { self.root.reborrow().as_mut().unwrap() };
            root.push_internal_level(self.alloc.clone()).push(ins.kv.0, ins.kv.1, ins.right)
        });
        self.current = handle.left_edge().next_back_kv().ok();
        *self.length += 1;
    }

    /// Inserts a new element into the `BTreeMap` before the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the end of the `BTreeMap`.
    ///
    /// # Safety
    ///
    /// You must ensure that the `BTreeMap` invariants are maintained.
    /// Specifically:
    ///
    /// * The key of the newly inserted element must be unique in the tree.
    /// * All keys in the tree must remain in sorted order.
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub unsafe fn insert_before_unchecked(&mut self, key: K, value: V) {
        let edge = match self.current.take() {
            None => {
                // SAFETY: We have no other reference to the tree.
                match unsafe { self.root.reborrow() } {
                    root @ None => {
                        // Tree is empty, allocate a new root.
                        let mut node = NodeRef::new_leaf(self.alloc.clone());
                        node.borrow_mut().push(key, value);
                        *root = Some(node.forget_type());
                        *self.length += 1;
                        return;
                    }
                    Some(root) => root.borrow_mut().last_leaf_edge(),
                }
            }
            Some(current) => current.next_back_leaf_edge(),
        };

        let handle = edge.insert_recursing(key, value, self.alloc.clone(), |ins| {
            drop(ins.left);
            // SAFETY: The handle to the newly inserted value is always on a
            // leaf node, so adding a new root node doesn't invalidate it.
            let root = unsafe { self.root.reborrow().as_mut().unwrap() };
            root.push_internal_level(self.alloc.clone()).push(ins.kv.0, ins.kv.1, ins.right)
        });
        self.current = handle.right_edge().next_kv().ok();
        *self.length += 1;
    }
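
The `_unchecked` insertions above leave the ordering and uniqueness invariants entirely to the caller. A hedged sketch of a correct call (an editor's illustration, not part of this diff), again assuming nightly `btree_cursors`:

```
#![feature(btree_cursors)]
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::from([(10, 'a'), (30, 'c')]);
    let mut cur = map.upper_bound_mut(Bound::Included(&10));
    // SAFETY: 20 is strictly between the current key (10) and the next key (30),
    // so the map stays sorted and the new key is unique.
    unsafe { cur.insert_after_unchecked(20, 'b') };
    assert_eq!(map, BTreeMap::from([(10, 'a'), (20, 'b'), (30, 'c')]));
}
```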

    /// Inserts a new element into the `BTreeMap` after the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the front of the `BTreeMap`.
    ///
    /// # Panics
    ///
    /// This function panics if:
    /// - the given key compares less than or equal to the current element (if
    ///   any).
    /// - the given key compares greater than or equal to the next element (if
    ///   any).
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub fn insert_after(&mut self, key: K, value: V) {
        if let Some(current) = self.key() {
            if &key <= current {
                panic!("key must be ordered above the current element");
            }
        }
        if let Some((next, _)) = self.peek_next() {
            if &key >= next {
                panic!("key must be ordered below the next element");
            }
        }
        unsafe {
            self.insert_after_unchecked(key, value);
        }
    }

    /// Inserts a new element into the `BTreeMap` before the current one.
    ///
    /// If the cursor is pointing at the "ghost" non-element then the new element is
    /// inserted at the end of the `BTreeMap`.
    ///
    /// # Panics
    ///
    /// This function panics if:
    /// - the given key compares greater than or equal to the current element
    ///   (if any).
    /// - the given key compares less than or equal to the previous element (if
    ///   any).
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub fn insert_before(&mut self, key: K, value: V) {
        if let Some(current) = self.key() {
            if &key >= current {
                panic!("key must be ordered below the current element");
            }
        }
        if let Some((prev, _)) = self.peek_prev() {
            if &key <= prev {
                panic!("key must be ordered above the previous element");
            }
        }
        unsafe {
            self.insert_before_unchecked(key, value);
        }
    }
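
The checked `insert_after`/`insert_before` wrappers enforce that ordering contract at run time and panic otherwise. A minimal usage sketch (an editor's illustration, not part of this diff):

```
#![feature(btree_cursors)]
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::from([(1, 'a'), (4, 'd')]);
    // Cursor on key 4; keys inserted before it must sort between 1 and 4.
    let mut cur = map.lower_bound_mut(Bound::Excluded(&1));
    cur.insert_before(2, 'b');
    cur.insert_before(3, 'c');
    // cur.insert_before(0, 'x'); // would panic: 0 is not above the previous key
    assert_eq!(map, BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd')]));
}
```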

    /// Removes the current element from the `BTreeMap`.
    ///
    /// The element that was removed is returned, and the cursor is
    /// moved to point to the next element in the `BTreeMap`.
    ///
    /// If the cursor is currently pointing to the "ghost" non-element then no element
    /// is removed and `None` is returned. The cursor is not moved in this case.
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub fn remove_current(&mut self) -> Option<(K, V)> {
        let current = self.current.take()?;
        let mut emptied_internal_root = false;
        let (kv, pos) =
            current.remove_kv_tracking(|| emptied_internal_root = true, self.alloc.clone());
        self.current = pos.next_kv().ok();
        *self.length -= 1;
        if emptied_internal_root {
            // SAFETY: This is safe since current does not point within the now
            // empty root node.
            let root = unsafe { self.root.reborrow().as_mut().unwrap() };
            root.pop_internal_level(self.alloc.clone());
        }
        Some(kv)
    }

    /// Removes the current element from the `BTreeMap`.
    ///
    /// The element that was removed is returned, and the cursor is
    /// moved to point to the previous element in the `BTreeMap`.
    ///
    /// If the cursor is currently pointing to the "ghost" non-element then no element
    /// is removed and `None` is returned. The cursor is not moved in this case.
    #[unstable(feature = "btree_cursors", issue = "107540")]
    pub fn remove_current_and_move_back(&mut self) -> Option<(K, V)> {
        let current = self.current.take()?;
        let mut emptied_internal_root = false;
        let (kv, pos) =
            current.remove_kv_tracking(|| emptied_internal_root = true, self.alloc.clone());
        self.current = pos.next_back_kv().ok();
        *self.length -= 1;
        if emptied_internal_root {
            // SAFETY: This is safe since current does not point within the now
            // empty root node.
            let root = unsafe { self.root.reborrow().as_mut().unwrap() };
            root.pop_internal_level(self.alloc.clone());
        }
        Some(kv)
    }
}
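
Since `remove_current` hands back the removed pair and advances the cursor to the next element, it composes into a selective drain. A minimal usage sketch (an editor's illustration, not part of this diff):

```
#![feature(btree_cursors)]
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd')]);
    // Remove every element with an even key, stepping past the others.
    let mut cur = map.lower_bound_mut(Bound::Unbounded);
    while let Some((k, _)) = cur.key_value() {
        if k % 2 == 0 {
            cur.remove_current(); // cursor moves on to the next element
        } else {
            cur.move_next();
        }
    }
    assert_eq!(map, BTreeMap::from([(1, 'a'), (3, 'c')]));
}
```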

#[cfg(test)]
@ -347,7 +347,7 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
-   pub fn insert(self, value: V) -> &'a mut V {
+   pub fn insert(mut self, value: V) -> &'a mut V {
        let out_ptr = match self.handle {
            None => {
                // SAFETY: There is no tree yet so no reference to it exists.
@ -358,25 +358,27 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
                map.length = 1;
                val_ptr
            }
-           Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) {
-               (None, val_ptr) => {
-                   // SAFETY: We have consumed self.handle.
-                   let map = unsafe { self.dormant_map.awaken() };
-                   map.length += 1;
-                   val_ptr
-               }
-               (Some(ins), val_ptr) => {
-                   drop(ins.left);
-                   // SAFETY: We have consumed self.handle and dropped the
-                   // remaining reference to the tree, ins.left.
-                   let map = unsafe { self.dormant_map.awaken() };
-                   let root = map.root.as_mut().unwrap(); // same as ins.left
-                   root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right);
-                   map.length += 1;
-                   val_ptr
-               }
-           },
+           Some(handle) => {
+               let new_handle =
+                   handle.insert_recursing(self.key, value, self.alloc.clone(), |ins| {
+                       drop(ins.left);
+                       // SAFETY: Pushing a new root node doesn't invalidate
+                       // handles to existing nodes.
+                       let map = unsafe { self.dormant_map.reborrow() };
+                       let root = map.root.as_mut().unwrap(); // same as ins.left
+                       root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right)
+                   });
+
+               // Get the pointer to the value
+               let val_ptr = new_handle.into_val_mut();
+
+               // SAFETY: We have consumed self.handle.
+               let map = unsafe { self.dormant_map.awaken() };
+               map.length += 1;
+               val_ptr
+           }
        };

        // Now that we have finished growing the tree using borrowed references,
        // dereference the pointer to a part of it, that we picked up along the way.
        unsafe { &mut *out_ptr }
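
For context, this rewritten `VacantEntry::insert` is what runs behind the ordinary entry API. A minimal usage sketch of that public path (an editor's illustration, not part of this diff):

```
use std::collections::BTreeMap;
use std::collections::btree_map::Entry;

fn main() {
    let mut map: BTreeMap<&str, u32> = BTreeMap::new();
    // A missing key yields a `VacantEntry`, whose `insert` is shown above.
    if let Entry::Vacant(entry) = map.entry("poneyland") {
        entry.insert(37);
    }
    assert_eq!(map["poneyland"], 37);
}
```

The change routes the root-splitting work through a closure passed to `insert_recursing`, so that the handle to the inserted value can be recovered afterwards via `into_val_mut`.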
@ -2336,3 +2336,52 @@ fn from_array() {
    let unordered_duplicates = BTreeMap::from([(3, 4), (1, 2), (1, 2)]);
    assert_eq!(map, unordered_duplicates);
}

#[test]
fn test_cursor() {
    let map = BTreeMap::from([(1, 'a'), (2, 'b'), (3, 'c')]);

    let mut cur = map.lower_bound(Bound::Unbounded);
    assert_eq!(cur.key(), Some(&1));
    cur.move_next();
    assert_eq!(cur.key(), Some(&2));
    assert_eq!(cur.peek_next(), Some((&3, &'c')));
    cur.move_prev();
    assert_eq!(cur.key(), Some(&1));
    assert_eq!(cur.peek_prev(), None);

    let mut cur = map.upper_bound(Bound::Excluded(&1));
    assert_eq!(cur.key(), None);
    cur.move_next();
    assert_eq!(cur.key(), Some(&1));
    cur.move_prev();
    assert_eq!(cur.key(), None);
    assert_eq!(cur.peek_prev(), Some((&3, &'c')));
}

#[test]
fn test_cursor_mut() {
    let mut map = BTreeMap::from([(1, 'a'), (3, 'c'), (5, 'e')]);
    let mut cur = map.lower_bound_mut(Bound::Excluded(&3));
    assert_eq!(cur.key(), Some(&5));
    cur.insert_before(4, 'd');
    assert_eq!(cur.key(), Some(&5));
    assert_eq!(cur.peek_prev(), Some((&4, &mut 'd')));
    cur.move_next();
    assert_eq!(cur.key(), None);
    cur.insert_before(6, 'f');
    assert_eq!(cur.key(), None);
    assert_eq!(cur.remove_current(), None);
    assert_eq!(cur.key(), None);
    cur.insert_after(0, '?');
    assert_eq!(cur.key(), None);
    assert_eq!(map, BTreeMap::from([(0, '?'), (1, 'a'), (3, 'c'), (4, 'd'), (5, 'e'), (6, 'f')]));

    let mut cur = map.upper_bound_mut(Bound::Included(&5));
    assert_eq!(cur.key(), Some(&5));
    assert_eq!(cur.remove_current(), Some((5, 'e')));
    assert_eq!(cur.key(), Some(&6));
    assert_eq!(cur.remove_current_and_move_back(), Some((6, 'f')));
    assert_eq!(cur.key(), Some(&4));
    assert_eq!(map, BTreeMap::from([(0, '?'), (1, 'a'), (3, 'c'), (4, 'd')]));
}
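
One piece of the new API the tests above do not cover is `as_cursor`, which lends out a read-only view of a `CursorMut`. A hedged sketch (an editor's illustration, not part of this diff), assuming nightly `btree_cursors`:

```
#![feature(btree_cursors)]
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let mut map = BTreeMap::from([(1, 'a'), (2, 'b')]);
    let mut cur_mut = map.lower_bound_mut(Bound::Unbounded);
    // Take a read-only snapshot of the mutable cursor's position.
    let cur = cur_mut.as_cursor();
    assert_eq!(cur.key(), Some(&1));
    // The `CursorMut` is usable again once the `Cursor` is gone.
    cur_mut.move_next();
    assert_eq!(cur_mut.key(), Some(&2));
}
```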

@ -4,6 +4,7 @@ use core::ops::RangeBounds;
use core::ptr;

use super::node::{marker, ForceResult::*, Handle, NodeRef};
+use super::search::SearchBound;

use crate::alloc::Allocator;
// `front` and `back` are always both `None` or both `Some`.
@ -386,7 +387,7 @@ impl<BorrowType: marker::BorrowType, K, V>
    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the left side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
-   fn next_back_kv(
+   pub fn next_back_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
@ -707,7 +708,9 @@ impl<BorrowType: marker::BorrowType, K, V>
    }

    /// Returns the leaf edge closest to a KV for backward navigation.
-   fn next_back_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+   pub fn next_back_leaf_edge(
+       self,
+   ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
        match self.force() {
            Leaf(leaf_kv) => leaf_kv.left_edge(),
            Internal(internal_kv) => {
@ -717,3 +720,51 @@ impl<BorrowType: marker::BorrowType, K, V>
        }
    }
}

impl<BorrowType: marker::BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
    /// Returns the leaf edge corresponding to the first point at which the
    /// given bound is true.
    pub fn lower_bound<Q: ?Sized>(
        self,
        mut bound: SearchBound<&Q>,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        let mut node = self;
        loop {
            let (edge, new_bound) = node.find_lower_bound_edge(bound);
            match edge.force() {
                Leaf(edge) => return edge,
                Internal(edge) => {
                    node = edge.descend();
                    bound = new_bound;
                }
            }
        }
    }

    /// Returns the leaf edge corresponding to the last point at which the
    /// given bound is true.
    pub fn upper_bound<Q: ?Sized>(
        self,
        mut bound: SearchBound<&Q>,
    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>
    where
        Q: Ord,
        K: Borrow<Q>,
    {
        let mut node = self;
        loop {
            let (edge, new_bound) = node.find_upper_bound_edge(bound);
            match edge.force() {
                Leaf(edge) => return edge,
                Internal(edge) => {
                    node = edge.descend();
                    bound = new_bound;
                }
            }
        }
    }
}
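
These internal `lower_bound`/`upper_bound` descents back the public cursor constructors on `BTreeMap` that the tests use. A minimal sketch of their bound semantics (an editor's illustration, not part of this diff), assuming nightly `btree_cursors`:

```
#![feature(btree_cursors)]
use std::collections::BTreeMap;
use std::ops::Bound;

fn main() {
    let map = BTreeMap::from([(10, 'a'), (20, 'b'), (30, 'c')]);
    // First element >= 15.
    let cur = map.lower_bound(Bound::Included(&15));
    assert_eq!(cur.key(), Some(&20));
    // Last element <= 15.
    let cur = map.upper_bound(Bound::Included(&15));
    assert_eq!(cur.key(), Some(&10));
}
```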

@ -442,6 +442,24 @@ impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
        // SAFETY: we have exclusive access to the entire node.
        unsafe { &mut *ptr }
    }

    /// Returns a dormant copy of this node with its lifetime erased which can
    /// be reawakened later.
    pub fn dormant(&self) -> NodeRef<marker::DormantMut, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<K, V, Type> NodeRef<marker::DormantMut, K, V, Type> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub unsafe fn awaken<'a>(self) -> NodeRef<marker::Mut<'a>, K, V, Type> {
        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
    }
}

impl<K, V, Type> NodeRef<marker::Dying, K, V, Type> {
@ -798,6 +816,25 @@ impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeT
        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
    }

    /// Returns a dormant copy of this handle which can be reawakened later.
    ///
    /// See `DormantMutRef` for more details.
    pub fn dormant(&self) -> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
        Handle { node: self.node.dormant(), idx: self.idx, _marker: PhantomData }
    }
}

impl<K, V, NodeType, HandleType> Handle<NodeRef<marker::DormantMut, K, V, NodeType>, HandleType> {
    /// Revert to the unique borrow initially captured.
    ///
    /// # Safety
    ///
    /// The reborrow must have ended, i.e., the reference returned by `new` and
    /// all pointers and references derived from it, must not be used anymore.
    pub unsafe fn awaken<'a>(self) -> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
        Handle { node: unsafe { self.node.awaken() }, idx: self.idx, _marker: PhantomData }
    }
}
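
The `dormant`/`awaken` pair added here follows the same idea as the existing `DormantMutRef` mentioned in the doc comment: keep a lifetime-erased copy of a unique borrow, finish all work through the active borrow, then revive the copy. A self-contained analogue with made-up names (an editor's sketch, not code from this diff):

```
use std::marker::PhantomData;
use std::ptr::NonNull;

struct DormantRef<'a, T> {
    ptr: NonNull<T>,
    _marker: PhantomData<&'a mut T>,
}

fn put_to_sleep<'a, T>(r: &'a mut T) -> (&'a mut T, DormantRef<'a, T>) {
    let mut ptr = NonNull::from(r);
    // SAFETY: `ptr` comes from a unique borrow that is immediately handed back
    // to the caller; the dormant copy must not be used until that borrow ends.
    let active = unsafe { ptr.as_mut() };
    (active, DormantRef { ptr, _marker: PhantomData })
}

impl<'a, T> DormantRef<'a, T> {
    /// SAFETY: the reference returned by `put_to_sleep`, and everything
    /// derived from it, must no longer be in use.
    unsafe fn awaken(self) -> &'a mut T {
        &mut *self.ptr.as_ptr()
    }
}

fn main() {
    let mut value = 1;
    let (active, dormant) = put_to_sleep(&mut value);
    *active += 1; // finish all work through the active borrow first
    // SAFETY: `active` is not used past this point.
    let revived = unsafe { dormant.awaken() };
    *revived += 1;
    assert_eq!(value, 3);
}
```

This is what lets the reworked `insert` below return a handle to the inserted element even though node splitting may still reshuffle the tree above it.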

impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
@ -851,9 +888,11 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method assumes that there is enough space in the node for the new
    /// pair to fit.
-   ///
-   /// The returned pointer points to the inserted value.
-   fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+   unsafe fn insert_fit(
+       mut self,
+       key: K,
+       val: V,
+   ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
        debug_assert!(self.node.len() < CAPACITY);
        let new_len = self.node.len() + 1;

@ -862,7 +901,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
        slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
        *self.node.len_mut() = new_len as u16;

-       self.node.val_area_mut(self.idx).assume_init_mut()
+       Handle::new_kv(self.node, self.idx)
    }
}
}

@ -871,21 +910,26 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
    /// Inserts a new key-value pair between the key-value pairs to the right and left of
    /// this edge. This method splits the node if there isn't enough room.
    ///
-   /// The returned pointer points to the inserted value.
+   /// Returns a dormant handle to the inserted node which can be reawakened
+   /// once splitting is complete.
    fn insert<A: Allocator + Clone>(
-       mut self,
+       self,
        key: K,
        val: V,
        alloc: A,
-   ) -> (Option<SplitResult<'a, K, V, marker::Leaf>>, *mut V) {
+   ) -> (
+       Option<SplitResult<'a, K, V, marker::Leaf>>,
+       Handle<NodeRef<marker::DormantMut, K, V, marker::Leaf>, marker::KV>,
+   ) {
        if self.node.len() < CAPACITY {
-           let val_ptr = self.insert_fit(key, val);
-           (None, val_ptr)
+           // SAFETY: There is enough space in the node for insertion.
+           let handle = unsafe { self.insert_fit(key, val) };
+           (None, handle.dormant())
        } else {
            let (middle_kv_idx, insertion) = splitpoint(self.idx);
            let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
            let mut result = middle.split(alloc);
-           let mut insertion_edge = match insertion {
+           let insertion_edge = match insertion {
                LeftOrRight::Left(insert_idx) => unsafe {
                    Handle::new_edge(result.left.reborrow_mut(), insert_idx)
                },
@ -893,8 +937,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
                    Handle::new_edge(result.right.borrow_mut(), insert_idx)
                },
            };
-           let val_ptr = insertion_edge.insert_fit(key, val);
-           (Some(result), val_ptr)
+           // SAFETY: We just split the node, so there is enough space for
+           // insertion.
+           let handle = unsafe { insertion_edge.insert_fit(key, val).dormant() };
+           (Some(result), handle)
        }
    }
}

@ -976,21 +1022,31 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
        key: K,
        value: V,
        alloc: A,
-   ) -> (Option<SplitResult<'a, K, V, marker::LeafOrInternal>>, *mut V) {
-       let (mut split, val_ptr) = match self.insert(key, value, alloc.clone()) {
-           (None, val_ptr) => return (None, val_ptr),
-           (Some(split), val_ptr) => (split.forget_node_type(), val_ptr),
+       split_root: impl FnOnce(SplitResult<'a, K, V, marker::LeafOrInternal>),
+   ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+       let (mut split, handle) = match self.insert(key, value, alloc.clone()) {
+           // SAFETY: we have finished splitting and can now re-awaken the
+           // handle to the inserted element.
+           (None, handle) => return unsafe { handle.awaken() },
+           (Some(split), handle) => (split.forget_node_type(), handle),
        };

        loop {
            split = match split.left.ascend() {
                Ok(parent) => {
                    match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
-                       None => return (None, val_ptr),
+                       // SAFETY: we have finished splitting and can now re-awaken the
+                       // handle to the inserted element.
+                       None => return unsafe { handle.awaken() },
                        Some(split) => split.forget_node_type(),
                    }
                }
-               Err(root) => return (Some(SplitResult { left: root, ..split }), val_ptr),
+               Err(root) => {
+                   split_root(SplitResult { left: root, ..split });
+                   // SAFETY: we have finished splitting and can now re-awaken the
+                   // handle to the inserted element.
+                   return unsafe { handle.awaken() };
+               }
            };
        }
    }

@ -1043,6 +1099,14 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
        let leaf = self.node.into_leaf_mut();
        unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() }
    }

    pub fn into_kv_valmut(self) -> (&'a K, &'a mut V) {
        debug_assert!(self.idx < self.node.len());
        let leaf = self.node.into_leaf_mut();
        let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() };
        let v = unsafe { leaf.vals.get_unchecked_mut(self.idx).assume_init_mut() };
        (k, v)
    }
}

impl<'a, K, V, NodeType> Handle<NodeRef<marker::ValMut<'a>, K, V, NodeType>, marker::KV> {

@ -1667,6 +1731,7 @@ pub mod marker {

    pub enum Owned {}
    pub enum Dying {}
+   pub enum DormantMut {}
    pub struct Immut<'a>(PhantomData<&'a ()>);
    pub struct Mut<'a>(PhantomData<&'a mut ()>);
    pub struct ValMut<'a>(PhantomData<&'a mut ()>);
@ -1688,6 +1753,7 @@ pub mod marker {
    impl<'a> BorrowType for Immut<'a> {}
    impl<'a> BorrowType for Mut<'a> {}
    impl<'a> BorrowType for ValMut<'a> {}
+   impl BorrowType for DormantMut {}

    pub enum KV {}
    pub enum Edge {}

@ -102,7 +102,7 @@ enum ErrorData<C> {
/// portability.
///
/// [`into`]: Into::into
-#[unstable(feature = "raw_os_error_ty", issue = "none")]
+#[unstable(feature = "raw_os_error_ty", issue = "107792")]
pub type RawOsError = i32;

// `#[repr(align(4))]` is probably redundant, it should have that value or
@ -262,7 +262,7 @@ use crate::sys_common::memchr;

#[stable(feature = "bufwriter_into_parts", since = "1.56.0")]
pub use self::buffered::WriterPanicked;
-#[unstable(feature = "raw_os_error_ty", issue = "none")]
+#[unstable(feature = "raw_os_error_ty", issue = "107792")]
pub use self::error::RawOsError;
pub(crate) use self::stdio::attempt_print_to_stderr;
#[unstable(feature = "internal_output_capture", issue = "none")]

@ -222,7 +222,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
    fn after_krate(&mut self) -> Result<(), Error> {
        debug!("Done with crate");

-       debug!("Adding Primitve impls");
+       debug!("Adding Primitive impls");
        for primitive in Rc::clone(&self.cache).primitive_locations.values() {
            self.get_impls(*primitive);
        }

@ -626,7 +626,7 @@ fn test_item() {
        stringify_item!(
            impl ~const Struct {}
        ),
-       "impl Struct {}", // FIXME
+       "impl ~const Struct {}",
    );

    // ItemKind::MacCall
@ -838,7 +838,7 @@ fn test_ty() {
    assert_eq!(stringify_ty!(dyn Send + 'a), "dyn Send + 'a");
    assert_eq!(stringify_ty!(dyn 'a + Send), "dyn 'a + Send");
    assert_eq!(stringify_ty!(dyn ?Sized), "dyn ?Sized");
-   assert_eq!(stringify_ty!(dyn ~const Clone), "dyn Clone"); // FIXME
+   assert_eq!(stringify_ty!(dyn ~const Clone), "dyn ~const Clone");
    assert_eq!(stringify_ty!(dyn for<'a> Send), "dyn for<'a> Send");

    // TyKind::ImplTrait
@ -846,7 +846,7 @@ fn test_ty() {
    assert_eq!(stringify_ty!(impl Send + 'a), "impl Send + 'a");
    assert_eq!(stringify_ty!(impl 'a + Send), "impl 'a + Send");
    assert_eq!(stringify_ty!(impl ?Sized), "impl ?Sized");
-   assert_eq!(stringify_ty!(impl ~const Clone), "impl Clone"); // FIXME
+   assert_eq!(stringify_ty!(impl ~const Clone), "impl ~const Clone");
    assert_eq!(stringify_ty!(impl for<'a> Send), "impl for<'a> Send");

    // TyKind::Paren

@ -0,0 +1,13 @@
// check-pass

#![feature(derive_const)]
#![feature(const_trait_impl)]

#[derive_const(PartialEq)]
pub struct Reverse<T>(T);

const fn foo(a: Reverse<i32>, b: Reverse<i32>) -> bool {
    a == b
}

fn main() {}
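
The new test relies on `#[derive_const(PartialEq)]` generating an equality impl usable in const contexts. A hedged extension of the same test (an editor's sketch, not part of the diff), assuming the same nightly features, that forces the comparison to run at compile time:

```
#![feature(derive_const)]
#![feature(const_trait_impl)]

#[derive_const(PartialEq)]
pub struct Reverse<T>(T);

const fn foo(a: Reverse<i32>, b: Reverse<i32>) -> bool {
    a == b
}

// Hypothetical addition: evaluate the const-derived comparison at compile time.
const _: () = assert!(foo(Reverse(1), Reverse(1)));

fn main() {}
```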

tests/ui/unpretty/ast-const-trait-bound.rs (new file, 4 lines)
@ -0,0 +1,4 @@
// compile-flags: -Zunpretty=normal
// check-pass

fn foo() where T: ~const Bar {}

tests/ui/unpretty/ast-const-trait-bound.stdout (new file, 4 lines)
@ -0,0 +1,4 @@
// compile-flags: -Zunpretty=normal
// check-pass

fn foo() where T: ~const Bar {}