Auto merge of #134439 - matthiaskrgr:rollup-grmmmx2, r=matthiaskrgr
Rollup of 7 pull requests

Successful merges:

 - #133265 (Add a range argument to vec.extract_if)
 - #133801 (Promote powerpc64le-unknown-linux-musl to tier 2 with host tools)
 - #134323 (coverage: Dismantle `map_data.rs` by moving its responsibilities elsewhere)
 - #134378 (An octuple of polonius fact generation cleanups)
 - #134408 (Regression test for RPIT inheriting lifetime from projection)
 - #134423 (bootstrap: use specific-purpose ui test path for `test_valid` self-test)
 - #134426 (Fix typo in uint_macros.rs)

Failed merges:

 - #133103 (Pass FnAbi to find_mir_or_eval_fn)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 52890e8215
40 changed files with 612 additions and 550 deletions
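Of the merges listed above, #133265 is the one that touches a public (nightly) API: `Vec::extract_if` gains a range argument. That library change is not part of the hunks reproduced below (which cover the borrowck/polonius and coverage-codegen work), so as orientation here is a minimal, illustrative sketch of the updated call shape. The range-before-closure parameter order and the concrete values are assumptions based on the PR title, not something shown in this commit.

    #![feature(extract_if)] // assumed: still feature-gated at the time of this rollup

    fn main() {
        let mut numbers = vec![1, 2, 3, 4, 5, 6, 7, 8];

        // Drain the even values, but only within index range 2..6;
        // elements outside that range are left untouched.
        let evens: Vec<i32> = numbers.extract_if(2..6, |n| *n % 2 == 0).collect();

        assert_eq!(evens, [4, 6]);
        assert_eq!(numbers, [1, 2, 3, 5, 7, 8]);
    }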
@@ -124,6 +124,7 @@ pub(crate) fn compute_regions<'a, 'tcx>(
         borrow_set,
         move_data,
         &universal_region_relations,
+        &constraints,
     );
 
     let mut regioncx = RegionInferenceContext::new(
compiler/rustc_borrowck/src/polonius/legacy/accesses.rs (new file, 85 lines)
@@ -0,0 +1,85 @@
+use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
+use rustc_middle::mir::{Body, Local, Location, Place};
+use rustc_middle::ty::TyCtxt;
+use rustc_mir_dataflow::move_paths::{LookupResult, MoveData};
+use tracing::debug;
+
+use crate::def_use::{self, DefUse};
+use crate::facts::AllFacts;
+use crate::location::{LocationIndex, LocationTable};
+use crate::universal_regions::UniversalRegions;
+
+/// Emit polonius facts for variable defs, uses, drops, and path accesses.
+pub(crate) fn emit_access_facts<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    facts: &mut AllFacts,
+    body: &Body<'tcx>,
+    location_table: &LocationTable,
+    move_data: &MoveData<'tcx>,
+    universal_regions: &UniversalRegions<'tcx>,
+) {
+    let mut extractor = AccessFactsExtractor { facts, move_data, location_table };
+    extractor.visit_body(body);
+
+    for (local, local_decl) in body.local_decls.iter_enumerated() {
+        debug!("add use_of_var_derefs_origin facts - local={:?}, type={:?}", local, local_decl.ty);
+        tcx.for_each_free_region(&local_decl.ty, |region| {
+            let region_vid = universal_regions.to_region_vid(region);
+            facts.use_of_var_derefs_origin.push((local, region_vid.into()));
+        });
+    }
+}
+
+/// MIR visitor extracting point-wise facts about accesses.
+struct AccessFactsExtractor<'a, 'tcx> {
+    facts: &'a mut AllFacts,
+    move_data: &'a MoveData<'tcx>,
+    location_table: &'a LocationTable,
+}
+
+impl<'tcx> AccessFactsExtractor<'_, 'tcx> {
+    fn location_to_index(&self, location: Location) -> LocationIndex {
+        self.location_table.mid_index(location)
+    }
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for AccessFactsExtractor<'a, 'tcx> {
+    fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) {
+        match def_use::categorize(context) {
+            Some(DefUse::Def) => {
+                debug!("AccessFactsExtractor - emit def");
+                self.facts.var_defined_at.push((local, self.location_to_index(location)));
+            }
+            Some(DefUse::Use) => {
+                debug!("AccessFactsExtractor - emit use");
+                self.facts.var_used_at.push((local, self.location_to_index(location)));
+            }
+            Some(DefUse::Drop) => {
+                debug!("AccessFactsExtractor - emit drop");
+                self.facts.var_dropped_at.push((local, self.location_to_index(location)));
+            }
+            _ => (),
+        }
+    }
+
+    fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
+        self.super_place(place, context, location);
+
+        match context {
+            PlaceContext::NonMutatingUse(_)
+            | PlaceContext::MutatingUse(MutatingUseContext::Borrow) => {
+                let path = match self.move_data.rev_lookup.find(place.as_ref()) {
+                    LookupResult::Exact(path) | LookupResult::Parent(Some(path)) => path,
+                    _ => {
+                        // There's no path access to emit.
+                        return;
+                    }
+                };
+                debug!("AccessFactsExtractor - emit path access ({path:?}, {location:?})");
+                self.facts.path_accessed_at_base.push((path, self.location_to_index(location)));
+            }
+
+            _ => {}
+        }
+    }
+}
@@ -21,22 +21,22 @@ use crate::{
 /// Emit `loan_invalidated_at` facts.
 pub(super) fn emit_loan_invalidations<'tcx>(
     tcx: TyCtxt<'tcx>,
-    all_facts: &mut AllFacts,
-    location_table: &LocationTable,
+    facts: &mut AllFacts,
     body: &Body<'tcx>,
+    location_table: &LocationTable,
     borrow_set: &BorrowSet<'tcx>,
 ) {
     let dominators = body.basic_blocks.dominators();
     let mut visitor =
-        LoanInvalidationsGenerator { all_facts, borrow_set, tcx, location_table, body, dominators };
+        LoanInvalidationsGenerator { facts, borrow_set, tcx, location_table, body, dominators };
     visitor.visit_body(body);
 }
 
 struct LoanInvalidationsGenerator<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
-    all_facts: &'a mut AllFacts,
-    location_table: &'a LocationTable,
+    facts: &'a mut AllFacts,
     body: &'a Body<'tcx>,
+    location_table: &'a LocationTable,
     dominators: &'a Dominators<BasicBlock>,
     borrow_set: &'a BorrowSet<'tcx>,
 }
@@ -151,7 +151,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> {
                 let resume = self.location_table.start_index(resume.start_location());
                 for (i, data) in borrow_set.iter_enumerated() {
                     if borrow_of_local_data(data.borrowed_place) {
-                        self.all_facts.loan_invalidated_at.push((resume, i));
+                        self.facts.loan_invalidated_at.push((resume, i));
                     }
                 }
 
@@ -165,7 +165,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> {
                 let start = self.location_table.start_index(location);
                 for (i, data) in borrow_set.iter_enumerated() {
                     if borrow_of_local_data(data.borrowed_place) {
-                        self.all_facts.loan_invalidated_at.push((start, i));
+                        self.facts.loan_invalidated_at.push((start, i));
                    }
                }
            }
@@ -409,7 +409,7 @@ impl<'a, 'tcx> LoanInvalidationsGenerator<'a, 'tcx> {
     /// Generates a new `loan_invalidated_at(L, B)` fact.
     fn emit_loan_invalidated_at(&mut self, b: BorrowIndex, l: Location) {
         let lidx = self.location_table.start_index(l);
-        self.all_facts.loan_invalidated_at.push((lidx, b));
+        self.facts.loan_invalidated_at.push((lidx, b));
     }
 
     fn check_activations(&mut self, location: Location) {
@@ -14,12 +14,12 @@ use crate::places_conflict;
 /// Emit `loan_killed_at` and `cfg_edge` facts at the same time.
 pub(super) fn emit_loan_kills<'tcx>(
     tcx: TyCtxt<'tcx>,
-    all_facts: &mut AllFacts,
-    location_table: &LocationTable,
+    facts: &mut AllFacts,
     body: &Body<'tcx>,
+    location_table: &LocationTable,
     borrow_set: &BorrowSet<'tcx>,
 ) {
-    let mut visitor = LoanKillsGenerator { borrow_set, tcx, location_table, all_facts, body };
+    let mut visitor = LoanKillsGenerator { borrow_set, tcx, location_table, facts, body };
     for (bb, data) in body.basic_blocks.iter_enumerated() {
         visitor.visit_basic_block_data(bb, data);
     }
@@ -27,7 +27,7 @@ pub(super) fn emit_loan_kills<'tcx>(
 
 struct LoanKillsGenerator<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
-    all_facts: &'a mut AllFacts,
+    facts: &'a mut AllFacts,
     location_table: &'a LocationTable,
     borrow_set: &'a BorrowSet<'tcx>,
     body: &'a Body<'tcx>,
@@ -36,12 +36,12 @@ struct LoanKillsGenerator<'a, 'tcx> {
 impl<'a, 'tcx> Visitor<'tcx> for LoanKillsGenerator<'a, 'tcx> {
     fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
         // Also record CFG facts here.
-        self.all_facts.cfg_edge.push((
+        self.facts.cfg_edge.push((
             self.location_table.start_index(location),
             self.location_table.mid_index(location),
         ));
 
-        self.all_facts.cfg_edge.push((
+        self.facts.cfg_edge.push((
             self.location_table.mid_index(location),
             self.location_table.start_index(location.successor_within_block()),
         ));
@@ -63,15 +63,15 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanKillsGenerator<'a, 'tcx> {
 
     fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
         // Also record CFG facts here.
-        self.all_facts.cfg_edge.push((
+        self.facts.cfg_edge.push((
             self.location_table.start_index(location),
             self.location_table.mid_index(location),
         ));
 
         let successor_blocks = terminator.successors();
-        self.all_facts.cfg_edge.reserve(successor_blocks.size_hint().0);
+        self.facts.cfg_edge.reserve(successor_blocks.size_hint().0);
         for successor_block in successor_blocks {
-            self.all_facts.cfg_edge.push((
+            self.facts.cfg_edge.push((
                 self.location_table.mid_index(location),
                 self.location_table.start_index(successor_block.start_location()),
             ));
@@ -128,7 +128,7 @@ impl<'tcx> LoanKillsGenerator<'_, 'tcx> {
 
             if places_conflict {
                 let location_index = self.location_table.mid_index(location);
-                self.all_facts.loan_killed_at.push((borrow_index, location_index));
+                self.facts.loan_killed_at.push((borrow_index, location_index));
             }
         }
     }
@@ -140,9 +140,9 @@ impl<'tcx> LoanKillsGenerator<'_, 'tcx> {
     fn record_killed_borrows_for_local(&mut self, local: Local, location: Location) {
         if let Some(borrow_indices) = self.borrow_set.local_map.get(&local) {
             let location_index = self.location_table.mid_index(location);
-            self.all_facts.loan_killed_at.reserve(borrow_indices.len());
+            self.facts.loan_killed_at.reserve(borrow_indices.len());
             for &borrow_index in borrow_indices {
-                self.all_facts.loan_killed_at.push((borrow_index, location_index));
+                self.facts.loan_killed_at.push((borrow_index, location_index));
            }
        }
    }
@@ -3,16 +3,23 @@
 //! Will be removed in the future, once the in-tree `-Zpolonius=next` implementation reaches feature
 //! parity.
 
-use rustc_middle::mir::{Body, LocalKind, Location, START_BLOCK};
-use rustc_middle::ty::TyCtxt;
+use std::iter;
+
+use either::Either;
+use rustc_middle::mir::{Body, Local, LocalKind, Location, START_BLOCK};
+use rustc_middle::ty::{GenericArg, TyCtxt};
 use rustc_mir_dataflow::move_paths::{InitKind, InitLocation, MoveData};
 use tracing::debug;
 
 use crate::borrow_set::BorrowSet;
+use crate::constraints::OutlivesConstraint;
 use crate::facts::{AllFacts, PoloniusRegionVid};
 use crate::location::LocationTable;
+use crate::type_check::MirTypeckRegionConstraints;
 use crate::type_check::free_region_relations::UniversalRegionRelations;
+use crate::universal_regions::UniversalRegions;
 
+mod accesses;
 mod loan_invalidations;
 mod loan_kills;
 
@@ -22,6 +29,8 @@ mod loan_kills;
 /// - CFG points and edges
 /// - loan kills
 /// - loan invalidations
+/// - access facts such as variable definitions, uses, drops, and path accesses
+/// - outlives constraints
 ///
 /// The rest of the facts are emitted during typeck and liveness.
 pub(crate) fn emit_facts<'tcx>(
@@ -30,34 +39,42 @@ pub(crate) fn emit_facts<'tcx>(
     location_table: &LocationTable,
     body: &Body<'tcx>,
     borrow_set: &BorrowSet<'tcx>,
-    move_data: &MoveData<'_>,
-    universal_region_relations: &UniversalRegionRelations<'_>,
+    move_data: &MoveData<'tcx>,
+    universal_region_relations: &UniversalRegionRelations<'tcx>,
+    constraints: &MirTypeckRegionConstraints<'tcx>,
 ) {
-    let Some(all_facts) = all_facts else {
+    let Some(facts) = all_facts else {
         // We don't do anything if there are no facts to fill.
         return;
     };
     let _prof_timer = tcx.prof.generic_activity("polonius_fact_generation");
-    emit_move_facts(all_facts, move_data, location_table, body);
-    emit_universal_region_facts(all_facts, borrow_set, universal_region_relations);
-    emit_cfg_and_loan_kills_facts(all_facts, tcx, location_table, body, borrow_set);
-    emit_loan_invalidations_facts(all_facts, tcx, location_table, body, borrow_set);
+    emit_move_facts(facts, body, location_table, move_data);
+    emit_universal_region_facts(facts, borrow_set, universal_region_relations);
+    loan_kills::emit_loan_kills(tcx, facts, body, location_table, borrow_set);
+    loan_invalidations::emit_loan_invalidations(tcx, facts, body, location_table, borrow_set);
+    accesses::emit_access_facts(
+        tcx,
+        facts,
+        body,
+        location_table,
+        move_data,
+        &universal_region_relations.universal_regions,
+    );
+    emit_outlives_facts(facts, location_table, constraints);
 }
 
 /// Emit facts needed for move/init analysis: moves and assignments.
 fn emit_move_facts(
-    all_facts: &mut AllFacts,
-    move_data: &MoveData<'_>,
-    location_table: &LocationTable,
+    facts: &mut AllFacts,
+    body: &Body<'_>,
+    location_table: &LocationTable,
+    move_data: &MoveData<'_>,
 ) {
-    all_facts
-        .path_is_var
-        .extend(move_data.rev_lookup.iter_locals_enumerated().map(|(l, r)| (r, l)));
+    facts.path_is_var.extend(move_data.rev_lookup.iter_locals_enumerated().map(|(l, r)| (r, l)));
 
     for (child, move_path) in move_data.move_paths.iter_enumerated() {
         if let Some(parent) = move_path.parent {
-            all_facts.child_path.push((child, parent));
+            facts.child_path.push((child, parent));
         }
     }
 
@@ -83,14 +100,14 @@ fn emit_move_facts(
                         // The initialization happened in (or rather, when arriving at)
                         // the successors, but not in the unwind block.
                         let first_statement = Location { block: successor, statement_index: 0 };
-                        all_facts
+                        facts
                             .path_assigned_at_base
                             .push((init.path, location_table.start_index(first_statement)));
                     }
                 } else {
                     // In all other cases, the initialization just happens at the
                     // midpoint, like any other effect.
-                    all_facts
+                    facts
                         .path_assigned_at_base
                         .push((init.path, location_table.mid_index(location)));
                 }
@@ -98,7 +115,7 @@ fn emit_move_facts(
             // Arguments are initialized on function entry
             InitLocation::Argument(local) => {
                 assert!(body.local_kind(local) == LocalKind::Arg);
-                all_facts.path_assigned_at_base.push((init.path, fn_entry_start));
+                facts.path_assigned_at_base.push((init.path, fn_entry_start));
             }
         }
     }
@@ -107,20 +124,20 @@ fn emit_move_facts(
         if body.local_kind(local) != LocalKind::Arg {
             // Non-arguments start out deinitialised; we simulate this with an
             // initial move:
-            all_facts.path_moved_at_base.push((path, fn_entry_start));
+            facts.path_moved_at_base.push((path, fn_entry_start));
         }
     }
 
     // moved_out_at
     // deinitialisation is assumed to always happen!
-    all_facts
+    facts
         .path_moved_at_base
         .extend(move_data.moves.iter().map(|mo| (mo.path, location_table.mid_index(mo.source))));
 }
 
 /// Emit universal regions facts, and their relations.
 fn emit_universal_region_facts(
-    all_facts: &mut AllFacts,
+    facts: &mut AllFacts,
     borrow_set: &BorrowSet<'_>,
     universal_region_relations: &UniversalRegionRelations<'_>,
 ) {
@@ -131,7 +148,7 @@ fn emit_universal_region_facts(
     // added to the existing number of loans, as if they succeeded them in the set.
     //
     let universal_regions = &universal_region_relations.universal_regions;
-    all_facts
+    facts
         .universal_region
         .extend(universal_regions.universal_regions_iter().map(PoloniusRegionVid::from));
     let borrow_count = borrow_set.len();
@@ -144,7 +161,7 @@ fn emit_universal_region_facts(
     for universal_region in universal_regions.universal_regions_iter() {
         let universal_region_idx = universal_region.index();
         let placeholder_loan_idx = borrow_count + universal_region_idx;
-        all_facts.placeholder.push((universal_region.into(), placeholder_loan_idx.into()));
+        facts.placeholder.push((universal_region.into(), placeholder_loan_idx.into()));
     }
 
     // 2: the universal region relations `outlives` constraints are emitted as
@@ -156,29 +173,51 @@ fn emit_universal_region_facts(
                 fr1={:?}, fr2={:?}",
                 fr1, fr2
             );
-            all_facts.known_placeholder_subset.push((fr1.into(), fr2.into()));
+            facts.known_placeholder_subset.push((fr1.into(), fr2.into()));
         }
     }
 }
 
-/// Emit facts about loan invalidations.
-fn emit_loan_invalidations_facts<'tcx>(
-    all_facts: &mut AllFacts,
+/// For every potentially drop()-touched region `region` in `local`'s type
+/// (`kind`), emit a `drop_of_var_derefs_origin(local, origin)` fact.
+pub(crate) fn emit_drop_facts<'tcx>(
     tcx: TyCtxt<'tcx>,
-    location_table: &LocationTable,
-    body: &Body<'tcx>,
-    borrow_set: &BorrowSet<'tcx>,
+    local: Local,
+    kind: &GenericArg<'tcx>,
+    universal_regions: &UniversalRegions<'tcx>,
+    all_facts: &mut Option<AllFacts>,
 ) {
-    loan_invalidations::emit_loan_invalidations(tcx, all_facts, location_table, body, borrow_set);
+    debug!("emit_drop_facts(local={:?}, kind={:?}", local, kind);
+    let Some(facts) = all_facts.as_mut() else { return };
+    let _prof_timer = tcx.prof.generic_activity("polonius_fact_generation");
+    tcx.for_each_free_region(kind, |drop_live_region| {
+        let region_vid = universal_regions.to_region_vid(drop_live_region);
+        facts.drop_of_var_derefs_origin.push((local, region_vid.into()));
+    });
 }
 
-/// Emit facts about CFG points and edges, as well as locations where loans are killed.
-fn emit_cfg_and_loan_kills_facts<'tcx>(
-    all_facts: &mut AllFacts,
-    tcx: TyCtxt<'tcx>,
+/// Emit facts about the outlives constraints: the `subset` base relation, i.e. not a transitive
+/// closure.
+fn emit_outlives_facts<'tcx>(
+    facts: &mut AllFacts,
     location_table: &LocationTable,
-    body: &Body<'tcx>,
-    borrow_set: &BorrowSet<'tcx>,
+    constraints: &MirTypeckRegionConstraints<'tcx>,
 ) {
-    loan_kills::emit_loan_kills(tcx, all_facts, location_table, body, borrow_set);
+    facts.subset_base.extend(constraints.outlives_constraints.outlives().iter().flat_map(
+        |constraint: &OutlivesConstraint<'_>| {
+            if let Some(from_location) = constraint.locations.from_location() {
+                Either::Left(iter::once((
+                    constraint.sup.into(),
+                    constraint.sub.into(),
+                    location_table.mid_index(from_location),
+                )))
+            } else {
+                Either::Right(
+                    location_table.all_points().map(move |location| {
+                        (constraint.sup.into(), constraint.sub.into(), location)
+                    }),
+                )
+            }
+        },
+    ));
 }
@@ -17,7 +17,6 @@ use crate::region_infer::values::LivenessValues;
 use crate::universal_regions::UniversalRegions;
 
 mod local_use_map;
-mod polonius;
 mod trace;
 
 /// Combines liveness analysis with initialization analysis to
@@ -45,8 +44,6 @@ pub(super) fn generate<'a, 'tcx>(
     let (relevant_live_locals, boring_locals) =
         compute_relevant_live_locals(typeck.tcx(), &free_regions, body);
 
-    polonius::emit_access_facts(typeck, body, move_data);
-
     trace::trace(
         typeck,
         body,
@@ -1,123 +0,0 @@
-use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
-use rustc_middle::mir::{Body, Local, Location, Place};
-use rustc_middle::ty::GenericArg;
-use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
-use tracing::debug;
-
-use super::TypeChecker;
-use crate::def_use::{self, DefUse};
-use crate::location::{LocationIndex, LocationTable};
-
-type VarPointRelation = Vec<(Local, LocationIndex)>;
-type PathPointRelation = Vec<(MovePathIndex, LocationIndex)>;
-
-/// Emit polonius facts for variable defs, uses, drops, and path accesses.
-pub(super) fn emit_access_facts<'a, 'tcx>(
-    typeck: &mut TypeChecker<'a, 'tcx>,
-    body: &Body<'tcx>,
-    move_data: &MoveData<'tcx>,
-) {
-    if let Some(facts) = typeck.all_facts.as_mut() {
-        debug!("emit_access_facts()");
-
-        let _prof_timer = typeck.infcx.tcx.prof.generic_activity("polonius_fact_generation");
-        let location_table = typeck.location_table;
-
-        let mut extractor = AccessFactsExtractor {
-            var_defined_at: &mut facts.var_defined_at,
-            var_used_at: &mut facts.var_used_at,
-            var_dropped_at: &mut facts.var_dropped_at,
-            path_accessed_at_base: &mut facts.path_accessed_at_base,
-            location_table,
-            move_data,
-        };
-        extractor.visit_body(body);
-
-        for (local, local_decl) in body.local_decls.iter_enumerated() {
-            debug!(
-                "add use_of_var_derefs_origin facts - local={:?}, type={:?}",
-                local, local_decl.ty
-            );
-            let universal_regions = &typeck.universal_regions;
-            typeck.infcx.tcx.for_each_free_region(&local_decl.ty, |region| {
-                let region_vid = universal_regions.to_region_vid(region);
-                facts.use_of_var_derefs_origin.push((local, region_vid.into()));
-            });
-        }
-    }
-}
-
-/// For every potentially drop()-touched region `region` in `local`'s type
-/// (`kind`), emit a Polonius `use_of_var_derefs_origin(local, origin)` fact.
-pub(super) fn emit_drop_facts<'tcx>(
-    typeck: &mut TypeChecker<'_, 'tcx>,
-    local: Local,
-    kind: &GenericArg<'tcx>,
-) {
-    debug!("emit_drop_facts(local={:?}, kind={:?}", local, kind);
-    if let Some(facts) = typeck.all_facts.as_mut() {
-        let _prof_timer = typeck.infcx.tcx.prof.generic_activity("polonius_fact_generation");
-        let universal_regions = &typeck.universal_regions;
-        typeck.infcx.tcx.for_each_free_region(kind, |drop_live_region| {
-            let region_vid = universal_regions.to_region_vid(drop_live_region);
-            facts.drop_of_var_derefs_origin.push((local, region_vid.into()));
-        });
-    }
-}
-
-/// MIR visitor extracting point-wise facts about accesses.
-struct AccessFactsExtractor<'a, 'tcx> {
-    var_defined_at: &'a mut VarPointRelation,
-    var_used_at: &'a mut VarPointRelation,
-    location_table: &'a LocationTable,
-    var_dropped_at: &'a mut VarPointRelation,
-    move_data: &'a MoveData<'tcx>,
-    path_accessed_at_base: &'a mut PathPointRelation,
-}
-
-impl<'tcx> AccessFactsExtractor<'_, 'tcx> {
-    fn location_to_index(&self, location: Location) -> LocationIndex {
-        self.location_table.mid_index(location)
-    }
-}
-
-impl<'a, 'tcx> Visitor<'tcx> for AccessFactsExtractor<'a, 'tcx> {
-    fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) {
-        match def_use::categorize(context) {
-            Some(DefUse::Def) => {
-                debug!("AccessFactsExtractor - emit def");
-                self.var_defined_at.push((local, self.location_to_index(location)));
-            }
-            Some(DefUse::Use) => {
-                debug!("AccessFactsExtractor - emit use");
-                self.var_used_at.push((local, self.location_to_index(location)));
-            }
-            Some(DefUse::Drop) => {
-                debug!("AccessFactsExtractor - emit drop");
-                self.var_dropped_at.push((local, self.location_to_index(location)));
-            }
-            _ => (),
-        }
-    }
-
-    fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
-        self.super_place(place, context, location);
-
-        match context {
-            PlaceContext::NonMutatingUse(_)
-            | PlaceContext::MutatingUse(MutatingUseContext::Borrow) => {
-                let path = match self.move_data.rev_lookup.find(place.as_ref()) {
-                    LookupResult::Exact(path) | LookupResult::Parent(Some(path)) => path,
-                    _ => {
-                        // There's no path access to emit.
-                        return;
-                    }
-                };
-                debug!("AccessFactsExtractor - emit path access ({path:?}, {location:?})");
-                self.path_accessed_at_base.push((path, self.location_to_index(location)));
-            }
-
-            _ => {}
-        }
-    }
-}
@@ -15,9 +15,9 @@ use rustc_trait_selection::traits::query::type_op::{DropckOutlives, TypeOp, Type
 use tracing::debug;
 
 use crate::location::RichLocation;
+use crate::polonius;
 use crate::region_infer::values::{self, LiveLoans};
 use crate::type_check::liveness::local_use_map::LocalUseMap;
-use crate::type_check::liveness::polonius;
 use crate::type_check::{NormalizeLocation, TypeChecker};
 
 /// This is the heart of the liveness computation. For each variable X
@@ -590,7 +590,13 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
             // the destructor and must be live at this point.
             for &kind in &drop_data.dropck_result.kinds {
                 Self::make_all_regions_live(self.elements, self.typeck, kind, live_at);
-                polonius::emit_drop_facts(self.typeck, dropped_local, &kind);
+                polonius::legacy::emit_drop_facts(
+                    self.typeck.tcx(),
+                    dropped_local,
+                    &kind,
+                    self.typeck.universal_regions,
+                    self.typeck.all_facts,
+                );
             }
         }
|
@ -3,7 +3,6 @@
|
|||
use std::rc::Rc;
|
||||
use std::{fmt, iter, mem};
|
||||
|
||||
use either::Either;
|
||||
use rustc_abi::{FIRST_VARIANT, FieldIdx};
|
||||
use rustc_data_structures::frozen::Frozen;
|
||||
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
|
||||
|
@ -107,7 +106,6 @@ mod relate_tys;
|
|||
/// # Parameters
|
||||
///
|
||||
/// - `infcx` -- inference context to use
|
||||
/// - `param_env` -- parameter environment to use for trait solving
|
||||
/// - `body` -- MIR body to type-check
|
||||
/// - `promoted` -- map of promoted constants within `body`
|
||||
/// - `universal_regions` -- the universal regions from `body`s function signature
|
||||
|
@ -155,7 +153,7 @@ pub(crate) fn type_check<'a, 'tcx>(
|
|||
|
||||
debug!(?normalized_inputs_and_output);
|
||||
|
||||
let mut checker = TypeChecker {
|
||||
let mut typeck = TypeChecker {
|
||||
infcx,
|
||||
last_span: body.span,
|
||||
body,
|
||||
|
@ -171,24 +169,22 @@ pub(crate) fn type_check<'a, 'tcx>(
|
|||
constraints: &mut constraints,
|
||||
};
|
||||
|
||||
checker.check_user_type_annotations();
|
||||
typeck.check_user_type_annotations();
|
||||
|
||||
let mut verifier = TypeVerifier { cx: &mut checker, promoted, last_span: body.span };
|
||||
let mut verifier = TypeVerifier { typeck: &mut typeck, promoted, last_span: body.span };
|
||||
verifier.visit_body(body);
|
||||
|
||||
checker.typeck_mir(body);
|
||||
checker.equate_inputs_and_outputs(body, &normalized_inputs_and_output);
|
||||
checker.check_signature_annotation(body);
|
||||
typeck.typeck_mir(body);
|
||||
typeck.equate_inputs_and_outputs(body, &normalized_inputs_and_output);
|
||||
typeck.check_signature_annotation(body);
|
||||
|
||||
liveness::generate(&mut checker, body, &elements, flow_inits, move_data);
|
||||
liveness::generate(&mut typeck, body, &elements, flow_inits, move_data);
|
||||
|
||||
translate_outlives_facts(&mut checker);
|
||||
let opaque_type_values = infcx.take_opaque_types();
|
||||
|
||||
let opaque_type_values = opaque_type_values
|
||||
let opaque_type_values = infcx
|
||||
.take_opaque_types()
|
||||
.into_iter()
|
||||
.map(|(opaque_type_key, decl)| {
|
||||
let _: Result<_, ErrorGuaranteed> = checker.fully_perform_op(
|
||||
let _: Result<_, ErrorGuaranteed> = typeck.fully_perform_op(
|
||||
Locations::All(body.span),
|
||||
ConstraintCategory::OpaqueType,
|
||||
CustomTypeOp::new(
|
||||
|
@ -218,11 +214,11 @@ pub(crate) fn type_check<'a, 'tcx>(
|
|||
match region.kind() {
|
||||
ty::ReVar(_) => region,
|
||||
ty::RePlaceholder(placeholder) => {
|
||||
checker.constraints.placeholder_region(infcx, placeholder)
|
||||
typeck.constraints.placeholder_region(infcx, placeholder)
|
||||
}
|
||||
_ => ty::Region::new_var(
|
||||
infcx.tcx,
|
||||
checker.universal_regions.to_region_vid(region),
|
||||
typeck.universal_regions.to_region_vid(region),
|
||||
),
|
||||
}
|
||||
});
|
||||
|
@ -234,30 +230,6 @@ pub(crate) fn type_check<'a, 'tcx>(
|
|||
MirTypeckResults { constraints, universal_region_relations, opaque_type_values }
|
||||
}
|
||||
|
||||
fn translate_outlives_facts(typeck: &mut TypeChecker<'_, '_>) {
|
||||
if let Some(facts) = typeck.all_facts {
|
||||
let _prof_timer = typeck.infcx.tcx.prof.generic_activity("polonius_fact_generation");
|
||||
let location_table = typeck.location_table;
|
||||
facts.subset_base.extend(
|
||||
typeck.constraints.outlives_constraints.outlives().iter().flat_map(
|
||||
|constraint: &OutlivesConstraint<'_>| {
|
||||
if let Some(from_location) = constraint.locations.from_location() {
|
||||
Either::Left(iter::once((
|
||||
constraint.sup.into(),
|
||||
constraint.sub.into(),
|
||||
location_table.mid_index(from_location),
|
||||
)))
|
||||
} else {
|
||||
Either::Right(location_table.all_points().map(move |location| {
|
||||
(constraint.sup.into(), constraint.sub.into(), location)
|
||||
}))
|
||||
}
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn mirbug(tcx: TyCtxt<'_>, span: Span, msg: String) {
|
||||
// We sometimes see MIR failures (notably predicate failures) due to
|
||||
|
@ -276,7 +248,7 @@ enum FieldAccessError {
|
|||
/// type, calling `span_mirbug` and returning an error type if there
|
||||
/// is a problem.
|
||||
struct TypeVerifier<'a, 'b, 'tcx> {
|
||||
cx: &'a mut TypeChecker<'b, 'tcx>,
|
||||
typeck: &'a mut TypeChecker<'b, 'tcx>,
|
||||
promoted: &'b IndexSlice<Promoted, Body<'tcx>>,
|
||||
last_span: Span,
|
||||
}
|
||||
|
@ -298,9 +270,9 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
self.super_const_operand(constant, location);
|
||||
let ty = self.sanitize_type(constant, constant.const_.ty());
|
||||
|
||||
self.cx.infcx.tcx.for_each_free_region(&ty, |live_region| {
|
||||
let live_region_vid = self.cx.universal_regions.to_region_vid(live_region);
|
||||
self.cx.constraints.liveness_constraints.add_location(live_region_vid, location);
|
||||
self.typeck.infcx.tcx.for_each_free_region(&ty, |live_region| {
|
||||
let live_region_vid = self.typeck.universal_regions.to_region_vid(live_region);
|
||||
self.typeck.constraints.liveness_constraints.add_location(live_region_vid, location);
|
||||
});
|
||||
|
||||
// HACK(compiler-errors): Constants that are gathered into Body.required_consts
|
||||
|
@ -312,14 +284,14 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
};
|
||||
|
||||
if let Some(annotation_index) = constant.user_ty {
|
||||
if let Err(terr) = self.cx.relate_type_and_user_type(
|
||||
if let Err(terr) = self.typeck.relate_type_and_user_type(
|
||||
constant.const_.ty(),
|
||||
ty::Invariant,
|
||||
&UserTypeProjection { base: annotation_index, projs: vec![] },
|
||||
locations,
|
||||
ConstraintCategory::Boring,
|
||||
) {
|
||||
let annotation = &self.cx.user_type_annotations[annotation_index];
|
||||
let annotation = &self.typeck.user_type_annotations[annotation_index];
|
||||
span_mirbug!(
|
||||
self,
|
||||
constant,
|
||||
|
@ -348,9 +320,12 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
promoted: &Body<'tcx>,
|
||||
ty,
|
||||
san_ty| {
|
||||
if let Err(terr) =
|
||||
verifier.cx.eq_types(ty, san_ty, locations, ConstraintCategory::Boring)
|
||||
{
|
||||
if let Err(terr) = verifier.typeck.eq_types(
|
||||
ty,
|
||||
san_ty,
|
||||
locations,
|
||||
ConstraintCategory::Boring,
|
||||
) {
|
||||
span_mirbug!(
|
||||
verifier,
|
||||
promoted,
|
||||
|
@ -368,21 +343,21 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
let promoted_ty = promoted_body.return_ty();
|
||||
check_err(self, promoted_body, ty, promoted_ty);
|
||||
} else {
|
||||
self.cx.ascribe_user_type(
|
||||
self.typeck.ascribe_user_type(
|
||||
constant.const_.ty(),
|
||||
ty::UserType::new(ty::UserTypeKind::TypeOf(uv.def, UserArgs {
|
||||
args: uv.args,
|
||||
user_self_ty: None,
|
||||
})),
|
||||
locations.span(self.cx.body),
|
||||
locations.span(self.typeck.body),
|
||||
);
|
||||
}
|
||||
} else if let Some(static_def_id) = constant.check_static_ptr(tcx) {
|
||||
let unnormalized_ty = tcx.type_of(static_def_id).instantiate_identity();
|
||||
let normalized_ty = self.cx.normalize(unnormalized_ty, locations);
|
||||
let normalized_ty = self.typeck.normalize(unnormalized_ty, locations);
|
||||
let literal_ty = constant.const_.ty().builtin_deref(true).unwrap();
|
||||
|
||||
if let Err(terr) = self.cx.eq_types(
|
||||
if let Err(terr) = self.typeck.eq_types(
|
||||
literal_ty,
|
||||
normalized_ty,
|
||||
locations,
|
||||
|
@ -394,7 +369,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
|
||||
if let ty::FnDef(def_id, args) = *constant.const_.ty().kind() {
|
||||
let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, args);
|
||||
self.cx.normalize_and_prove_instantiated_predicates(
|
||||
self.typeck.normalize_and_prove_instantiated_predicates(
|
||||
def_id,
|
||||
instantiated_predicates,
|
||||
locations,
|
||||
|
@ -404,7 +379,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
tcx.impl_of_method(def_id).map(|imp| tcx.def_kind(imp)),
|
||||
Some(DefKind::Impl { of_trait: true })
|
||||
));
|
||||
self.cx.prove_predicates(
|
||||
self.typeck.prove_predicates(
|
||||
args.types().map(|ty| ty::ClauseKind::WellFormed(ty.into())),
|
||||
locations,
|
||||
ConstraintCategory::Boring,
|
||||
|
@ -438,7 +413,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
local_decl.ty
|
||||
};
|
||||
|
||||
if let Err(terr) = self.cx.relate_type_and_user_type(
|
||||
if let Err(terr) = self.typeck.relate_type_and_user_type(
|
||||
ty,
|
||||
ty::Invariant,
|
||||
user_ty,
|
||||
|
@ -468,11 +443,11 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
|
||||
impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
||||
fn body(&self) -> &Body<'tcx> {
|
||||
self.cx.body
|
||||
self.typeck.body
|
||||
}
|
||||
|
||||
fn tcx(&self) -> TyCtxt<'tcx> {
|
||||
self.cx.infcx.tcx
|
||||
self.typeck.infcx.tcx
|
||||
}
|
||||
|
||||
fn sanitize_type(&mut self, parent: &dyn fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> {
|
||||
|
@ -522,7 +497,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
// whether the bounds fully apply: in effect, the rule is
|
||||
// that if a value of some type could implement `Copy`, then
|
||||
// it must.
|
||||
self.cx.prove_trait_ref(
|
||||
self.typeck.prove_trait_ref(
|
||||
trait_ref,
|
||||
location.to_locations(),
|
||||
ConstraintCategory::CopyBound,
|
||||
|
@ -537,7 +512,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
// checker on the promoted MIR, then transfer the constraints back to
|
||||
// the main MIR, changing the locations to the provided location.
|
||||
|
||||
let parent_body = mem::replace(&mut self.cx.body, promoted_body);
|
||||
let parent_body = mem::replace(&mut self.typeck.body, promoted_body);
|
||||
|
||||
// Use new sets of constraints and closure bounds so that we can
|
||||
// modify their locations.
|
||||
|
@ -548,18 +523,18 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
// Don't try to add borrow_region facts for the promoted MIR
|
||||
|
||||
let mut swap_constraints = |this: &mut Self| {
|
||||
mem::swap(this.cx.all_facts, all_facts);
|
||||
mem::swap(&mut this.cx.constraints.outlives_constraints, &mut constraints);
|
||||
mem::swap(&mut this.cx.constraints.liveness_constraints, &mut liveness_constraints);
|
||||
mem::swap(this.typeck.all_facts, all_facts);
|
||||
mem::swap(&mut this.typeck.constraints.outlives_constraints, &mut constraints);
|
||||
mem::swap(&mut this.typeck.constraints.liveness_constraints, &mut liveness_constraints);
|
||||
};
|
||||
|
||||
swap_constraints(self);
|
||||
|
||||
self.visit_body(promoted_body);
|
||||
|
||||
self.cx.typeck_mir(promoted_body);
|
||||
self.typeck.typeck_mir(promoted_body);
|
||||
|
||||
self.cx.body = parent_body;
|
||||
self.typeck.body = parent_body;
|
||||
// Merge the outlives constraints back in, at the given location.
|
||||
swap_constraints(self);
|
||||
|
||||
|
@ -575,7 +550,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
// temporary from the user's point of view.
|
||||
constraint.category = ConstraintCategory::Boring;
|
||||
}
|
||||
self.cx.constraints.outlives_constraints.push(constraint)
|
||||
self.typeck.constraints.outlives_constraints.push(constraint)
|
||||
}
|
||||
// If the region is live at least one location in the promoted MIR,
|
||||
// then add a liveness constraint to the main MIR for this region
|
||||
|
@ -585,7 +560,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
// unordered.
|
||||
#[allow(rustc::potential_query_instability)]
|
||||
for region in liveness_constraints.live_regions_unordered() {
|
||||
self.cx.constraints.liveness_constraints.add_location(region, location);
|
||||
self.typeck.constraints.liveness_constraints.add_location(region, location);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -669,13 +644,13 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
},
|
||||
ProjectionElem::Field(field, fty) => {
|
||||
let fty = self.sanitize_type(place, fty);
|
||||
let fty = self.cx.normalize(fty, location);
|
||||
let fty = self.typeck.normalize(fty, location);
|
||||
match self.field_ty(place, base, field, location) {
|
||||
Ok(ty) => {
|
||||
let ty = self.cx.normalize(ty, location);
|
||||
let ty = self.typeck.normalize(ty, location);
|
||||
debug!(?fty, ?ty);
|
||||
|
||||
if let Err(terr) = self.cx.relate_types(
|
||||
if let Err(terr) = self.typeck.relate_types(
|
||||
ty,
|
||||
self.get_ambient_variance(context),
|
||||
fty,
|
||||
|
@ -707,8 +682,8 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
}
|
||||
ProjectionElem::OpaqueCast(ty) => {
|
||||
let ty = self.sanitize_type(place, ty);
|
||||
let ty = self.cx.normalize(ty, location);
|
||||
self.cx
|
||||
let ty = self.typeck.normalize(ty, location);
|
||||
self.typeck
|
||||
.relate_types(
|
||||
ty,
|
||||
self.get_ambient_variance(context),
|
||||
|
@ -817,7 +792,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
};
|
||||
|
||||
if let Some(field) = variant.fields.get(field) {
|
||||
Ok(self.cx.normalize(field.ty(tcx, args), location))
|
||||
Ok(self.typeck.normalize(field.ty(tcx, args), location))
|
||||
} else {
|
||||
Err(FieldAccessError::OutOfRange { field_count: variant.fields.len() })
|
||||
}
|
||||
|
|
|
@ -1,84 +0,0 @@
|
|||
use rustc_data_structures::captures::Captures;
|
||||
use rustc_middle::mir::coverage::{
|
||||
CovTerm, CoverageIdsInfo, Expression, FunctionCoverageInfo, Mapping, MappingKind, Op,
|
||||
SourceRegion,
|
||||
};
|
||||
|
||||
use crate::coverageinfo::ffi::{Counter, CounterExpression, ExprKind};
|
||||
|
||||
pub(crate) struct FunctionCoverage<'tcx> {
|
||||
pub(crate) function_coverage_info: &'tcx FunctionCoverageInfo,
|
||||
/// If `None`, the corresponding function is unused.
|
||||
ids_info: Option<&'tcx CoverageIdsInfo>,
|
||||
}
|
||||
|
||||
impl<'tcx> FunctionCoverage<'tcx> {
|
||||
pub(crate) fn new_used(
|
||||
function_coverage_info: &'tcx FunctionCoverageInfo,
|
||||
ids_info: &'tcx CoverageIdsInfo,
|
||||
) -> Self {
|
||||
Self { function_coverage_info, ids_info: Some(ids_info) }
|
||||
}
|
||||
|
||||
pub(crate) fn new_unused(function_coverage_info: &'tcx FunctionCoverageInfo) -> Self {
|
||||
Self { function_coverage_info, ids_info: None }
|
||||
}
|
||||
|
||||
/// Returns true for a used (called) function, and false for an unused function.
|
||||
pub(crate) fn is_used(&self) -> bool {
|
||||
self.ids_info.is_some()
|
||||
}
|
||||
|
||||
/// Return the source hash, generated from the HIR node structure, and used to indicate whether
|
||||
/// or not the source code structure changed between different compilations.
|
||||
pub(crate) fn source_hash(&self) -> u64 {
|
||||
if self.is_used() { self.function_coverage_info.function_source_hash } else { 0 }
|
||||
}
|
||||
|
||||
/// Convert this function's coverage expression data into a form that can be
|
||||
/// passed through FFI to LLVM.
|
||||
pub(crate) fn counter_expressions(
|
||||
&self,
|
||||
) -> impl Iterator<Item = CounterExpression> + ExactSizeIterator + Captures<'_> {
|
||||
// We know that LLVM will optimize out any unused expressions before
|
||||
// producing the final coverage map, so there's no need to do the same
|
||||
// thing on the Rust side unless we're confident we can do much better.
|
||||
// (See `CounterExpressionsMinimizer` in `CoverageMappingWriter.cpp`.)
|
||||
|
||||
self.function_coverage_info.expressions.iter().map(move |&Expression { lhs, op, rhs }| {
|
||||
CounterExpression {
|
||||
lhs: self.counter_for_term(lhs),
|
||||
kind: match op {
|
||||
Op::Add => ExprKind::Add,
|
||||
Op::Subtract => ExprKind::Subtract,
|
||||
},
|
||||
rhs: self.counter_for_term(rhs),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Converts this function's coverage mappings into an intermediate form
|
||||
/// that will be used by `mapgen` when preparing for FFI.
|
||||
pub(crate) fn counter_regions(
|
||||
&self,
|
||||
) -> impl Iterator<Item = (MappingKind, &SourceRegion)> + ExactSizeIterator {
|
||||
self.function_coverage_info.mappings.iter().map(move |mapping| {
|
||||
let Mapping { kind, source_region } = mapping;
|
||||
let kind =
|
||||
kind.map_terms(|term| if self.is_zero_term(term) { CovTerm::Zero } else { term });
|
||||
(kind, source_region)
|
||||
})
|
||||
}
|
||||
|
||||
fn counter_for_term(&self, term: CovTerm) -> Counter {
|
||||
if self.is_zero_term(term) { Counter::ZERO } else { Counter::from_term(term) }
|
||||
}
|
||||
|
||||
fn is_zero_term(&self, term: CovTerm) -> bool {
|
||||
match self.ids_info {
|
||||
Some(ids_info) => ids_info.is_zero_term(term),
|
||||
// This function is unused, so all coverage counters/expressions are zero.
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
use std::iter;
|
||||
|
||||
use itertools::Itertools as _;
|
||||
use itertools::Itertools;
|
||||
use rustc_abi::Align;
|
||||
use rustc_codegen_ssa::traits::{
|
||||
BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods,
|
||||
|
@ -8,8 +8,8 @@ use rustc_codegen_ssa::traits::{
|
|||
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
|
||||
use rustc_hir::def_id::{DefId, LocalDefId};
|
||||
use rustc_index::IndexVec;
|
||||
use rustc_middle::mir;
|
||||
use rustc_middle::ty::{self, TyCtxt};
|
||||
use rustc_middle::{bug, mir};
|
||||
use rustc_session::RemapFileNameExt;
|
||||
use rustc_session::config::RemapPathScopeComponents;
|
||||
use rustc_span::def_id::DefIdSet;
|
||||
|
@ -18,7 +18,6 @@ use tracing::debug;
|
|||
|
||||
use crate::common::CodegenCx;
|
||||
use crate::coverageinfo::llvm_cov;
|
||||
use crate::coverageinfo::map_data::FunctionCoverage;
|
||||
use crate::coverageinfo::mapgen::covfun::prepare_covfun_record;
|
||||
use crate::llvm;
|
||||
|
||||
|
@ -49,46 +48,40 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) {
|
|||
|
||||
debug!("Generating coverage map for CodegenUnit: `{}`", cx.codegen_unit.name());
|
||||
|
||||
// In order to show that unused functions have coverage counts of zero (0), LLVM requires the
|
||||
// functions exist. Generate synthetic functions with a (required) single counter, and add the
|
||||
// MIR `Coverage` code regions to the `function_coverage_map`, before calling
|
||||
// `ctx.take_function_coverage_map()`.
|
||||
if cx.codegen_unit.is_code_coverage_dead_code_cgu() {
|
||||
add_unused_functions(cx);
|
||||
}
|
||||
|
||||
// FIXME(#132395): Can this be none even when coverage is enabled?
|
||||
let function_coverage_map = match cx.coverage_cx {
|
||||
Some(ref cx) => cx.take_function_coverage_map(),
|
||||
let instances_used = match cx.coverage_cx {
|
||||
Some(ref cx) => cx.instances_used.borrow(),
|
||||
None => return,
|
||||
};
|
||||
if function_coverage_map.is_empty() {
|
||||
// This CGU has no functions with coverage instrumentation.
|
||||
return;
|
||||
}
|
||||
|
||||
let all_file_names = function_coverage_map
|
||||
// The order of entries in this global file table needs to be deterministic,
|
||||
// and ideally should also be independent of the details of stable-hashing,
|
||||
// because coverage tests snapshots (`.cov-map`) can observe the order and
|
||||
// would need to be re-blessed if it changes. As long as those requirements
|
||||
// are satisfied, the order can be arbitrary.
|
||||
let mut global_file_table = GlobalFileTable::new();
|
||||
|
||||
let mut covfun_records = instances_used
|
||||
.iter()
|
||||
.map(|(_, fn_cov)| fn_cov.function_coverage_info.body_span)
|
||||
.map(|span| span_file_name(tcx, span));
|
||||
let global_file_table = GlobalFileTable::new(all_file_names);
|
||||
|
||||
// Encode all filenames referenced by coverage mappings in this CGU.
|
||||
let filenames_buffer = global_file_table.make_filenames_buffer(tcx);
|
||||
// The `llvm-cov` tool uses this hash to associate each covfun record with
|
||||
// its corresponding filenames table, since the final binary will typically
|
||||
// contain multiple covmap records from different compilation units.
|
||||
let filenames_hash = llvm_cov::hash_bytes(&filenames_buffer);
|
||||
|
||||
let mut unused_function_names = Vec::new();
|
||||
|
||||
let covfun_records = function_coverage_map
|
||||
.into_iter()
|
||||
.filter_map(|(instance, function_coverage)| {
|
||||
prepare_covfun_record(tcx, &global_file_table, instance, &function_coverage)
|
||||
})
|
||||
.copied()
|
||||
// Sort by symbol name, so that the global file table is built in an
|
||||
// order that doesn't depend on the stable-hash-based order in which
|
||||
// instances were visited during codegen.
|
||||
.sorted_by_cached_key(|&instance| tcx.symbol_name(instance).name)
|
||||
.filter_map(|instance| prepare_covfun_record(tcx, &mut global_file_table, instance, true))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// In a single designated CGU, also prepare covfun records for functions
|
||||
// in this crate that were instrumented for coverage, but are unused.
|
||||
if cx.codegen_unit.is_code_coverage_dead_code_cgu() {
|
||||
let mut unused_instances = gather_unused_function_instances(cx);
|
||||
// Sort the unused instances by symbol name, for the same reason as the used ones.
|
||||
unused_instances.sort_by_cached_key(|&instance| tcx.symbol_name(instance).name);
|
||||
covfun_records.extend(unused_instances.into_iter().filter_map(|instance| {
|
||||
prepare_covfun_record(tcx, &mut global_file_table, instance, false)
|
||||
}));
|
||||
}
|
||||
|
||||
// If there are no covfun records for this CGU, don't generate a covmap record.
|
||||
// Emitting a covmap record without any covfun records causes `llvm-cov` to
|
||||
// fail when generating coverage reports, and if there are no covfun records
|
||||
|
@ -98,6 +91,15 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) {
|
|||
return;
|
||||
}
|
||||
|
||||
// Encode all filenames referenced by coverage mappings in this CGU.
|
||||
let filenames_buffer = global_file_table.make_filenames_buffer(tcx);
|
||||
// The `llvm-cov` tool uses this hash to associate each covfun record with
|
||||
// its corresponding filenames table, since the final binary will typically
|
||||
// contain multiple covmap records from different compilation units.
|
||||
let filenames_hash = llvm_cov::hash_bytes(&filenames_buffer);
|
||||
|
||||
let mut unused_function_names = vec![];
|
||||
|
||||
for covfun in &covfun_records {
|
||||
unused_function_names.extend(covfun.mangled_function_name_if_unused());
|
||||
|
||||
|
@ -137,22 +139,13 @@ struct GlobalFileTable {
|
|||
}
|
||||
|
||||
impl GlobalFileTable {
|
||||
fn new(all_file_names: impl IntoIterator<Item = Symbol>) -> Self {
|
||||
// Collect all of the filenames into a set. Filenames usually come in
|
||||
// contiguous runs, so we can dedup adjacent ones to save work.
|
||||
let mut raw_file_table = all_file_names.into_iter().dedup().collect::<FxIndexSet<Symbol>>();
|
||||
|
||||
// Sort the file table by its actual string values, not the arbitrary
|
||||
// ordering of its symbols.
|
||||
raw_file_table.sort_unstable_by(|a, b| a.as_str().cmp(b.as_str()));
|
||||
|
||||
Self { raw_file_table }
|
||||
fn new() -> Self {
|
||||
Self { raw_file_table: FxIndexSet::default() }
|
||||
}
|
||||
|
||||
fn global_file_id_for_file_name(&self, file_name: Symbol) -> GlobalFileId {
|
||||
let raw_id = self.raw_file_table.get_index_of(&file_name).unwrap_or_else(|| {
|
||||
bug!("file name not found in prepared global file table: {file_name}");
|
||||
});
|
||||
fn global_file_id_for_file_name(&mut self, file_name: Symbol) -> GlobalFileId {
|
||||
// Ensure the given file has a table entry, and get its index.
|
||||
let (raw_id, _) = self.raw_file_table.insert_full(file_name);
|
||||
// The raw file table doesn't include an entry for the working dir
|
||||
// (which has ID 0), so add 1 to get the correct ID.
|
||||
GlobalFileId::from_usize(raw_id + 1)
|
||||
|
@ -264,39 +257,35 @@ fn generate_covmap_record<'ll>(cx: &CodegenCx<'ll, '_>, version: u32, filenames_
|
|||
/// coverage map (in a single designated CGU) so that we still emit coverage mappings for them.
|
||||
/// We also end up adding their symbol names to a special global array that LLVM will include in
|
||||
/// its embedded coverage data.
|
||||
fn add_unused_functions(cx: &CodegenCx<'_, '_>) {
|
||||
fn gather_unused_function_instances<'tcx>(cx: &CodegenCx<'_, 'tcx>) -> Vec<ty::Instance<'tcx>> {
|
||||
assert!(cx.codegen_unit.is_code_coverage_dead_code_cgu());
|
||||
|
||||
let tcx = cx.tcx;
|
||||
let usage = prepare_usage_sets(tcx);
|
||||
|
||||
let is_unused_fn = |def_id: LocalDefId| -> bool {
|
||||
let def_id = def_id.to_def_id();
|
||||
|
||||
// To be eligible for "unused function" mappings, a definition must:
|
||||
// - Be function-like
|
||||
// Usage sets expect `DefId`, so convert from `LocalDefId`.
|
||||
let d: DefId = LocalDefId::to_def_id(def_id);
|
||||
// To be potentially eligible for "unused function" mappings, a definition must:
|
||||
// - Be eligible for coverage instrumentation
|
||||
// - Not participate directly in codegen (or have lost all its coverage statements)
|
||||
// - Not have any coverage statements inlined into codegenned functions
|
||||
tcx.def_kind(def_id).is_fn_like()
|
||||
&& (!usage.all_mono_items.contains(&def_id)
|
||||
|| usage.missing_own_coverage.contains(&def_id))
|
||||
&& !usage.used_via_inlining.contains(&def_id)
|
||||
tcx.is_eligible_for_coverage(def_id)
|
||||
&& (!usage.all_mono_items.contains(&d) || usage.missing_own_coverage.contains(&d))
|
||||
&& !usage.used_via_inlining.contains(&d)
|
||||
};
|
||||
|
||||
// Scan for unused functions that were instrumented for coverage.
|
||||
for def_id in tcx.mir_keys(()).iter().copied().filter(|&def_id| is_unused_fn(def_id)) {
|
||||
// Get the coverage info from MIR, skipping functions that were never instrumented.
|
||||
let body = tcx.optimized_mir(def_id);
|
||||
let Some(function_coverage_info) = body.function_coverage_info.as_deref() else { continue };
|
||||
|
||||
// FIXME(79651): Consider trying to filter out dummy instantiations of
|
||||
// FIXME(#79651): Consider trying to filter out dummy instantiations of
|
||||
// unused generic functions from library crates, because they can produce
|
||||
// "unused instantiation" in coverage reports even when they are actually
|
||||
// used by some downstream crate in the same binary.
|
||||
|
||||
debug!("generating unused fn: {def_id:?}");
|
||||
add_unused_function_coverage(cx, def_id, function_coverage_info);
|
||||
}
|
||||
tcx.mir_keys(())
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|&def_id| is_unused_fn(def_id))
|
||||
.map(|def_id| make_dummy_instance(tcx, def_id))
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
|
||||
struct UsageSets<'tcx> {
|
||||
|
@ -361,16 +350,11 @@ fn prepare_usage_sets<'tcx>(tcx: TyCtxt<'tcx>) -> UsageSets<'tcx> {
|
|||
UsageSets { all_mono_items, used_via_inlining, missing_own_coverage }
|
||||
}
|
||||
|
||||
fn add_unused_function_coverage<'tcx>(
|
||||
cx: &CodegenCx<'_, 'tcx>,
|
||||
def_id: LocalDefId,
|
||||
function_coverage_info: &'tcx mir::coverage::FunctionCoverageInfo,
|
||||
) {
|
||||
let tcx = cx.tcx;
|
||||
let def_id = def_id.to_def_id();
|
||||
fn make_dummy_instance<'tcx>(tcx: TyCtxt<'tcx>, local_def_id: LocalDefId) -> ty::Instance<'tcx> {
|
||||
let def_id = local_def_id.to_def_id();
|
||||
|
||||
// Make a dummy instance that fills in all generics with placeholders.
|
||||
let instance = ty::Instance::new(
|
||||
ty::Instance::new(
|
||||
def_id,
|
||||
ty::GenericArgs::for_item(tcx, def_id, |param, _| {
|
||||
if let ty::GenericParamDefKind::Lifetime = param.kind {
|
||||
|
@ -379,9 +363,5 @@ fn add_unused_function_coverage<'tcx>(
|
|||
tcx.mk_param_from_def(param)
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// An unused function's mappings will all be rewritten to map to zero.
|
||||
let function_coverage = FunctionCoverage::new_unused(function_coverage_info);
|
||||
cx.coverage_cx().function_coverage_map.borrow_mut().insert(instance, function_coverage);
|
||||
)
|
||||
}
|
||||
|
|
|
@ -11,13 +11,14 @@ use rustc_codegen_ssa::traits::{
|
|||
BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods,
|
||||
};
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::mir::coverage::MappingKind;
|
||||
use rustc_middle::mir::coverage::{
|
||||
CovTerm, CoverageIdsInfo, Expression, FunctionCoverageInfo, Mapping, MappingKind, Op,
|
||||
};
|
||||
use rustc_middle::ty::{Instance, TyCtxt};
|
||||
use rustc_target::spec::HasTargetSpec;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::common::CodegenCx;
|
||||
use crate::coverageinfo::map_data::FunctionCoverage;
|
||||
use crate::coverageinfo::mapgen::{GlobalFileTable, VirtualFileMapping, span_file_name};
|
||||
use crate::coverageinfo::{ffi, llvm_cov};
|
||||
use crate::llvm;
|
||||
|
@ -45,20 +46,25 @@ impl<'tcx> CovfunRecord<'tcx> {
|
|||
|
||||
pub(crate) fn prepare_covfun_record<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
global_file_table: &GlobalFileTable,
|
||||
global_file_table: &mut GlobalFileTable,
|
||||
instance: Instance<'tcx>,
|
||||
function_coverage: &FunctionCoverage<'tcx>,
|
||||
is_used: bool,
|
||||
) -> Option<CovfunRecord<'tcx>> {
|
||||
let fn_cov_info = tcx.instance_mir(instance.def).function_coverage_info.as_deref()?;
|
||||
let ids_info = tcx.coverage_ids_info(instance.def);
|
||||
|
||||
let expressions = prepare_expressions(fn_cov_info, ids_info, is_used);
|
||||
|
||||
let mut covfun = CovfunRecord {
|
||||
mangled_function_name: tcx.symbol_name(instance).name,
|
||||
source_hash: function_coverage.source_hash(),
|
||||
is_used: function_coverage.is_used(),
|
||||
source_hash: if is_used { fn_cov_info.function_source_hash } else { 0 },
|
||||
is_used,
|
||||
virtual_file_mapping: VirtualFileMapping::default(),
|
||||
expressions: function_coverage.counter_expressions().collect::<Vec<_>>(),
|
||||
expressions,
|
||||
regions: ffi::Regions::default(),
|
||||
};
|
||||
|
||||
fill_region_tables(tcx, global_file_table, function_coverage, &mut covfun);
|
||||
fill_region_tables(tcx, global_file_table, fn_cov_info, ids_info, &mut covfun);
|
||||
|
||||
if covfun.regions.has_no_regions() {
|
||||
if covfun.is_used {
|
||||
|
@ -72,20 +78,50 @@ pub(crate) fn prepare_covfun_record<'tcx>(
|
|||
Some(covfun)
|
||||
}
|
||||
|
||||
/// Convert the function's coverage-counter expressions into a form suitable for FFI.
|
||||
fn prepare_expressions(
|
||||
fn_cov_info: &FunctionCoverageInfo,
|
||||
ids_info: &CoverageIdsInfo,
|
||||
is_used: bool,
|
||||
) -> Vec<ffi::CounterExpression> {
|
||||
// If any counters or expressions were removed by MIR opts, replace their
|
||||
// terms with zero.
|
||||
let counter_for_term = |term| {
|
||||
if !is_used || ids_info.is_zero_term(term) {
|
||||
ffi::Counter::ZERO
|
||||
} else {
|
||||
ffi::Counter::from_term(term)
|
||||
}
|
||||
};
|
||||
|
||||
// We know that LLVM will optimize out any unused expressions before
|
||||
// producing the final coverage map, so there's no need to do the same
|
||||
// thing on the Rust side unless we're confident we can do much better.
|
||||
// (See `CounterExpressionsMinimizer` in `CoverageMappingWriter.cpp`.)
|
||||
fn_cov_info
|
||||
.expressions
|
||||
.iter()
|
||||
.map(move |&Expression { lhs, op, rhs }| ffi::CounterExpression {
|
||||
lhs: counter_for_term(lhs),
|
||||
kind: match op {
|
||||
Op::Add => ffi::ExprKind::Add,
|
||||
Op::Subtract => ffi::ExprKind::Subtract,
|
||||
},
|
||||
rhs: counter_for_term(rhs),
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
|
||||
/// Populates the mapping region tables in the current function's covfun record.
|
||||
fn fill_region_tables<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
global_file_table: &GlobalFileTable,
|
||||
function_coverage: &FunctionCoverage<'tcx>,
|
||||
global_file_table: &mut GlobalFileTable,
|
||||
fn_cov_info: &'tcx FunctionCoverageInfo,
|
||||
ids_info: &'tcx CoverageIdsInfo,
|
||||
covfun: &mut CovfunRecord<'tcx>,
|
||||
) {
|
||||
let counter_regions = function_coverage.counter_regions();
|
||||
if counter_regions.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
// Currently a function's mappings must all be in the same file as its body span.
|
||||
let file_name = span_file_name(tcx, function_coverage.function_coverage_info.body_span);
|
||||
let file_name = span_file_name(tcx, fn_cov_info.body_span);
|
||||
|
||||
// Look up the global file ID for that filename.
|
||||
let global_file_id = global_file_table.global_file_id_for_file_name(file_name);
|
||||
|
@ -99,10 +135,14 @@ fn fill_region_tables<'tcx>(
|
|||
|
||||
// For each counter/region pair in this function+file, convert it to a
|
||||
// form suitable for FFI.
|
||||
for (mapping_kind, region) in counter_regions {
|
||||
debug!("Adding counter {mapping_kind:?} to map for {region:?}");
|
||||
let span = ffi::CoverageSpan::from_source_region(local_file_id, region);
|
||||
match mapping_kind {
|
||||
let is_zero_term = |term| !covfun.is_used || ids_info.is_zero_term(term);
|
||||
for Mapping { kind, ref source_region } in &fn_cov_info.mappings {
|
||||
// If the mapping refers to counters/expressions that were removed by
|
||||
// MIR opts, replace those occurrences with zero.
|
||||
let kind = kind.map_terms(|term| if is_zero_term(term) { CovTerm::Zero } else { term });
|
||||
|
||||
let span = ffi::CoverageSpan::from_source_region(local_file_id, source_region);
|
||||
match kind {
|
||||
MappingKind::Code(term) => {
|
||||
code_regions.push(ffi::CodeRegion { span, counter: ffi::Counter::from_term(term) });
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ use rustc_abi::Size;
|
|||
use rustc_codegen_ssa::traits::{
|
||||
BuilderMethods, ConstCodegenMethods, CoverageInfoBuilderMethods, MiscCodegenMethods,
|
||||
};
|
||||
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
|
||||
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
|
||||
use rustc_middle::mir::coverage::CoverageKind;
|
||||
use rustc_middle::ty::Instance;
|
||||
use rustc_middle::ty::layout::HasTyCtxt;
|
||||
|
@ -13,18 +13,16 @@ use tracing::{debug, instrument};
|
|||
|
||||
use crate::builder::Builder;
|
||||
use crate::common::CodegenCx;
|
||||
use crate::coverageinfo::map_data::FunctionCoverage;
|
||||
use crate::llvm;
|
||||
|
||||
pub(crate) mod ffi;
|
||||
mod llvm_cov;
|
||||
pub(crate) mod map_data;
|
||||
mod mapgen;
|
||||
|
||||
/// Extra per-CGU context/state needed for coverage instrumentation.
|
||||
pub(crate) struct CguCoverageContext<'ll, 'tcx> {
|
||||
/// Coverage data for each instrumented function identified by DefId.
|
||||
pub(crate) function_coverage_map: RefCell<FxIndexMap<Instance<'tcx>, FunctionCoverage<'tcx>>>,
|
||||
pub(crate) instances_used: RefCell<FxIndexSet<Instance<'tcx>>>,
|
||||
pub(crate) pgo_func_name_var_map: RefCell<FxHashMap<Instance<'tcx>, &'ll llvm::Value>>,
|
||||
pub(crate) mcdc_condition_bitmap_map: RefCell<FxHashMap<Instance<'tcx>, Vec<&'ll llvm::Value>>>,
|
||||
|
||||
|
@ -34,17 +32,13 @@ pub(crate) struct CguCoverageContext<'ll, 'tcx> {
|
|||
impl<'ll, 'tcx> CguCoverageContext<'ll, 'tcx> {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
function_coverage_map: Default::default(),
|
||||
instances_used: RefCell::<FxIndexSet<_>>::default(),
|
||||
pgo_func_name_var_map: Default::default(),
|
||||
mcdc_condition_bitmap_map: Default::default(),
|
||||
covfun_section_name: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn take_function_coverage_map(&self) -> FxIndexMap<Instance<'tcx>, FunctionCoverage<'tcx>> {
|
||||
self.function_coverage_map.replace(FxIndexMap::default())
|
||||
}
|
||||
|
||||
/// LLVM uses a temp value to record the evaluated mcdc test vector of each decision, which is
|
||||
/// called condition bitmap. In order to handle nested decisions, several condition bitmaps can
|
||||
/// be allocated for a function body. These values are named `mcdc.addr.{i}` and are a 32-bit
|
||||
|
@ -157,12 +151,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
|
|||
// Mark the instance as used in this CGU, for coverage purposes.
|
||||
// This includes functions that were not partitioned into this CGU,
|
||||
// but were MIR-inlined into one of this CGU's functions.
|
||||
coverage_cx.function_coverage_map.borrow_mut().entry(instance).or_insert_with(|| {
|
||||
FunctionCoverage::new_used(
|
||||
function_coverage_info,
|
||||
bx.tcx.coverage_ids_info(instance.def),
|
||||
)
|
||||
});
|
||||
coverage_cx.instances_used.borrow_mut().insert(instance);
|
||||
|
||||
match *kind {
|
||||
CoverageKind::SpanMarker | CoverageKind::BlockMarker { .. } => unreachable!(
|
||||
|
|
|
@ -1569,18 +1569,18 @@ impl DiagCtxtInner {
|
|||
debug!(?diagnostic);
|
||||
debug!(?self.emitted_diagnostics);
|
||||
|
||||
let already_emitted_sub = |sub: &mut Subdiag| {
|
||||
let not_yet_emitted = |sub: &mut Subdiag| {
|
||||
debug!(?sub);
|
||||
if sub.level != OnceNote && sub.level != OnceHelp {
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
let mut hasher = StableHasher::new();
|
||||
sub.hash(&mut hasher);
|
||||
let diagnostic_hash = hasher.finish();
|
||||
debug!(?diagnostic_hash);
|
||||
!self.emitted_diagnostics.insert(diagnostic_hash)
|
||||
self.emitted_diagnostics.insert(diagnostic_hash)
|
||||
};
|
||||
diagnostic.children.extract_if(already_emitted_sub).for_each(|_| {});
|
||||
diagnostic.children.retain_mut(not_yet_emitted);
|
||||
if already_emitted {
|
||||
let msg = "duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`";
|
||||
diagnostic.sub(Note, msg, MultiSpan::new());
|
||||
|
|
|
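Note on the diagnostics hunk above: with `Vec::extract_if` now taking a range argument, the call site that only wanted in-place filtering was rewritten to use stable `retain_mut` with an inverted predicate. A minimal sketch of the equivalence, using plain integers and an "is even" closure as illustrative stand-ins for `Subdiag` values and the hash-based already-emitted check (neither stand-in is part of the diff):

```rust
fn main() {
    let mut subdiags = vec![1, 2, 3, 4, 5, 6];
    // Hypothetical "already emitted" test: here, simply "is even".
    let already_emitted = |x: &mut i32| *x % 2 == 0;

    // Old pattern: extract the matching elements and throw them away:
    //     subdiags.extract_if(.., already_emitted).for_each(drop);
    // New pattern: keep only the elements that were *not* yet emitted.
    subdiags.retain_mut(|x| !already_emitted(x));

    assert_eq!(subdiags, [1, 3, 5]);
}
```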
@ -205,7 +205,7 @@ impl EarlyLintPass for NonAsciiIdents {
|
|||
(IdentifierType::Not_NFKC, "Not_NFKC"),
|
||||
] {
|
||||
let codepoints: Vec<_> =
|
||||
chars.extract_if(|(_, ty)| *ty == Some(id_ty)).collect();
|
||||
chars.extract_if(.., |(_, ty)| *ty == Some(id_ty)).collect();
|
||||
if codepoints.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
@ -217,7 +217,7 @@ impl EarlyLintPass for NonAsciiIdents {
|
|||
}
|
||||
|
||||
let remaining = chars
|
||||
.extract_if(|(c, _)| !GeneralSecurityProfile::identifier_allowed(*c))
|
||||
.extract_if(.., |(c, _)| !GeneralSecurityProfile::identifier_allowed(*c))
|
||||
.collect::<Vec<_>>();
|
||||
if !remaining.is_empty() {
|
||||
cx.emit_span_lint(UNCOMMON_CODEPOINTS, sp, IdentifierUncommonCodepoints {
|
||||
|
|
|
@ -544,7 +544,7 @@ impl<'tcx> Collector<'tcx> {
|
|||
// can move them to the end of the list below.
|
||||
let mut existing = self
|
||||
.libs
|
||||
.extract_if(|lib| {
|
||||
.extract_if(.., |lib| {
|
||||
if lib.name.as_str() == passed_lib.name {
|
||||
// FIXME: This whole logic is questionable, whether modifiers are
|
||||
// involved or not, library reordering and kind overriding without
|
||||
|
|
|
@ -309,7 +309,7 @@ pub fn suggest_constraining_type_params<'a>(
|
|||
let Some(param) = param else { return false };
|
||||
|
||||
{
|
||||
let mut sized_constraints = constraints.extract_if(|(_, def_id, _)| {
|
||||
let mut sized_constraints = constraints.extract_if(.., |(_, def_id, _)| {
|
||||
def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Sized))
|
||||
});
|
||||
if let Some((_, def_id, _)) = sized_constraints.next() {
|
||||
|
|
|
@ -2817,11 +2817,11 @@ fn show_candidates(
|
|||
path_strings.sort_by(|a, b| a.0.cmp(&b.0));
|
||||
path_strings.dedup_by(|a, b| a.0 == b.0);
|
||||
let core_path_strings =
|
||||
path_strings.extract_if(|p| p.0.starts_with("core::")).collect::<Vec<_>>();
|
||||
path_strings.extract_if(.., |p| p.0.starts_with("core::")).collect::<Vec<_>>();
|
||||
let std_path_strings =
|
||||
path_strings.extract_if(|p| p.0.starts_with("std::")).collect::<Vec<_>>();
|
||||
path_strings.extract_if(.., |p| p.0.starts_with("std::")).collect::<Vec<_>>();
|
||||
let foreign_crate_path_strings =
|
||||
path_strings.extract_if(|p| !p.0.starts_with("crate::")).collect::<Vec<_>>();
|
||||
path_strings.extract_if(.., |p| !p.0.starts_with("crate::")).collect::<Vec<_>>();
|
||||
|
||||
// We list the `crate` local paths first.
|
||||
// Then we list the `std`/`core` paths.
|
||||
|
|
|
@ -629,7 +629,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
|
|||
// Try to filter out intrinsics candidates, as long as we have
|
||||
// some other candidates to suggest.
|
||||
let intrinsic_candidates: Vec<_> = candidates
|
||||
.extract_if(|sugg| {
|
||||
.extract_if(.., |sugg| {
|
||||
let path = path_names_to_string(&sugg.path);
|
||||
path.starts_with("core::intrinsics::") || path.starts_with("std::intrinsics::")
|
||||
})
|
||||
|
|
|
@ -13,8 +13,8 @@ pub(crate) fn target() -> Target {
|
|||
llvm_target: "powerpc64le-unknown-linux-musl".into(),
|
||||
metadata: crate::spec::TargetMetadata {
|
||||
description: Some("64-bit PowerPC Linux with musl 1.2.3, Little Endian".into()),
|
||||
tier: Some(3),
|
||||
host_tools: Some(false),
|
||||
tier: Some(2),
|
||||
host_tools: Some(true),
|
||||
std: Some(true),
|
||||
},
|
||||
pointer_width: 64,
|
||||
|
|
|
@ -447,7 +447,7 @@ pub fn normalize_param_env_or_error<'tcx>(
|
|||
// This works fairly well because trait matching does not actually care about param-env
|
||||
// TypeOutlives predicates - these are normally used by regionck.
|
||||
let outlives_predicates: Vec<_> = predicates
|
||||
.extract_if(|predicate| {
|
||||
.extract_if(.., |predicate| {
|
||||
matches!(predicate.kind().skip_binder(), ty::ClauseKind::TypeOutlives(..))
|
||||
})
|
||||
.collect();
|
||||
|
|
|
@ -1939,9 +1939,7 @@ pub struct ExtractIf<
|
|||
T: 'a,
|
||||
F: 'a,
|
||||
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
|
||||
> where
|
||||
F: FnMut(&mut T) -> bool,
|
||||
{
|
||||
> {
|
||||
list: &'a mut LinkedList<T, A>,
|
||||
it: Option<NonNull<Node<T>>>,
|
||||
pred: F,
|
||||
|
@ -1979,10 +1977,7 @@ where
|
|||
}
|
||||
|
||||
#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
||||
impl<T: fmt::Debug, F> fmt::Debug for ExtractIf<'_, T, F>
|
||||
where
|
||||
F: FnMut(&mut T) -> bool,
|
||||
{
|
||||
impl<T: fmt::Debug, F> fmt::Debug for ExtractIf<'_, T, F> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("ExtractIf").field(&self.list).finish()
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
use core::ops::{Range, RangeBounds};
|
||||
use core::{ptr, slice};
|
||||
|
||||
use super::Vec;
|
||||
|
@ -14,7 +15,7 @@ use crate::alloc::{Allocator, Global};
|
|||
/// #![feature(extract_if)]
|
||||
///
|
||||
/// let mut v = vec![0, 1, 2];
|
||||
/// let iter: std::vec::ExtractIf<'_, _, _> = v.extract_if(|x| *x % 2 == 0);
|
||||
/// let iter: std::vec::ExtractIf<'_, _, _> = v.extract_if(.., |x| *x % 2 == 0);
|
||||
/// ```
|
||||
#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
||||
#[derive(Debug)]
|
||||
|
@ -24,24 +25,32 @@ pub struct ExtractIf<
|
|||
T,
|
||||
F,
|
||||
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
|
||||
> where
|
||||
F: FnMut(&mut T) -> bool,
|
||||
{
|
||||
pub(super) vec: &'a mut Vec<T, A>,
|
||||
> {
|
||||
vec: &'a mut Vec<T, A>,
|
||||
/// The index of the item that will be inspected by the next call to `next`.
|
||||
pub(super) idx: usize,
|
||||
idx: usize,
|
||||
/// Elements at and beyond this point will be retained. Must be equal or smaller than `old_len`.
|
||||
end: usize,
|
||||
/// The number of items that have been drained (removed) thus far.
|
||||
pub(super) del: usize,
|
||||
del: usize,
|
||||
/// The original length of `vec` prior to draining.
|
||||
pub(super) old_len: usize,
|
||||
old_len: usize,
|
||||
/// The filter test predicate.
|
||||
pub(super) pred: F,
|
||||
pred: F,
|
||||
}
|
||||
|
||||
impl<'a, T, F, A: Allocator> ExtractIf<'a, T, F, A> {
|
||||
pub(super) fn new<R: RangeBounds<usize>>(vec: &'a mut Vec<T, A>, pred: F, range: R) -> Self {
|
||||
let old_len = vec.len();
|
||||
let Range { start, end } = slice::range(range, ..old_len);
|
||||
|
||||
// Guard against the vec getting leaked (leak amplification)
|
||||
unsafe {
|
||||
vec.set_len(0);
|
||||
}
|
||||
ExtractIf { vec, idx: start, del: 0, end, old_len, pred }
|
||||
}
|
||||
|
||||
impl<T, F, A: Allocator> ExtractIf<'_, T, F, A>
|
||||
where
|
||||
F: FnMut(&mut T) -> bool,
|
||||
{
|
||||
/// Returns a reference to the underlying allocator.
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
#[inline]
|
||||
|
@ -59,7 +68,7 @@ where
|
|||
|
||||
fn next(&mut self) -> Option<T> {
|
||||
unsafe {
|
||||
while self.idx < self.old_len {
|
||||
while self.idx < self.end {
|
||||
let i = self.idx;
|
||||
let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
|
||||
let drained = (self.pred)(&mut v[i]);
|
||||
|
@ -82,24 +91,15 @@ where
|
|||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
(0, Some(self.old_len - self.idx))
|
||||
(0, Some(self.end - self.idx))
|
||||
}
|
||||
}
|
||||
|
||||
#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
||||
impl<T, F, A: Allocator> Drop for ExtractIf<'_, T, F, A>
|
||||
where
|
||||
F: FnMut(&mut T) -> bool,
|
||||
{
|
||||
impl<T, F, A: Allocator> Drop for ExtractIf<'_, T, F, A> {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
if self.idx < self.old_len && self.del > 0 {
|
||||
// This is a pretty messed up state, and there isn't really an
|
||||
// obviously right thing to do. We don't want to keep trying
|
||||
// to execute `pred`, so we just backshift all the unprocessed
|
||||
// elements and tell the vec that they still exist. The backshift
|
||||
// is required to prevent a double-drop of the last successfully
|
||||
// drained item prior to a panic in the predicate.
|
||||
let ptr = self.vec.as_mut_ptr();
|
||||
let src = ptr.add(self.idx);
|
||||
let dst = src.sub(self.del);
|
||||
|
|
|
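A minimal sketch (not part of this diff) of how the reworked `alloc::vec::ExtractIf` above behaves, assuming a nightly toolchain with the `extract_if` feature gate shown in the diff; the concrete values are illustrative only. Iteration now stops at the end of the requested range, `size_hint`'s upper bound is `end - idx`, and the `set_len(0)` guard in `ExtractIf::new` means a leaked iterator forgets unprocessed elements rather than risking a double drop:

```rust
#![feature(extract_if)] // unstable; gate name taken from the diff above

fn main() {
    let mut v = vec![10, 20, 30, 40, 50];

    // Iteration is bounded by the end of the requested range (`1..3` here),
    // and the size hint's upper bound is `end - idx`, not `old_len - idx`.
    let mut it = v.extract_if(1..3, |x| *x >= 20);
    assert_eq!(it.size_hint(), (0, Some(2)));
    assert_eq!(it.next(), Some(20));
    assert_eq!(it.next(), Some(30));
    assert_eq!(it.next(), None); // 40 and 50 are outside the range
    drop(it);
    assert_eq!(v, [10, 40, 50]);

    // Leaking the iterator skips its `Drop`, so the length set to 0 by
    // `ExtractIf::new` is never restored: elements leak, but are never double-dropped.
    let mut w = vec![String::from("a"), String::from("b")];
    std::mem::forget(w.extract_if(.., |_| true));
    assert_eq!(w.len(), 0);
}
```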
@ -3615,12 +3615,15 @@ impl<T, A: Allocator> Vec<T, A> {
|
|||
Splice { drain: self.drain(range), replace_with: replace_with.into_iter() }
|
||||
}
|
||||
|
||||
/// Creates an iterator which uses a closure to determine if an element should be removed.
|
||||
/// Creates an iterator which uses a closure to determine if an element in the range should be removed.
|
||||
///
|
||||
/// If the closure returns true, then the element is removed and yielded.
|
||||
/// If the closure returns false, the element will remain in the vector and will not be yielded
|
||||
/// by the iterator.
|
||||
///
|
||||
/// Only elements that fall in the provided range are considered for extraction, but any elements
|
||||
/// after the range will still have to be moved if any element has been extracted.
|
||||
///
|
||||
/// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
|
||||
/// or the iteration short-circuits, then the remaining elements will be retained.
|
||||
/// Use [`retain`] with a negated predicate if you do not need the returned iterator.
|
||||
|
@ -3630,10 +3633,12 @@ impl<T, A: Allocator> Vec<T, A> {
|
|||
/// Using this method is equivalent to the following code:
|
||||
///
|
||||
/// ```
|
||||
/// # use std::cmp::min;
|
||||
/// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
|
||||
/// # let mut vec = vec![1, 2, 3, 4, 5, 6];
|
||||
/// let mut i = 0;
|
||||
/// while i < vec.len() {
|
||||
/// # let range = 1..4;
|
||||
/// let mut i = range.start;
|
||||
/// while i < min(vec.len(), range.end) {
|
||||
/// if some_predicate(&mut vec[i]) {
|
||||
/// let val = vec.remove(i);
|
||||
/// // your code here
|
||||
|
@ -3648,8 +3653,12 @@ impl<T, A: Allocator> Vec<T, A> {
|
|||
/// But `extract_if` is easier to use. `extract_if` is also more efficient,
|
||||
/// because it can backshift the elements of the array in bulk.
|
||||
///
|
||||
/// Note that `extract_if` also lets you mutate every element in the filter closure,
|
||||
/// regardless of whether you choose to keep or remove it.
|
||||
/// Note that `extract_if` also lets you mutate the elements passed to the filter closure,
|
||||
/// regardless of whether you choose to keep or remove them.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// If `range` is out of bounds.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -3659,25 +3668,29 @@ impl<T, A: Allocator> Vec<T, A> {
|
|||
/// #![feature(extract_if)]
|
||||
/// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
|
||||
///
|
||||
/// let evens = numbers.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
/// let evens = numbers.extract_if(.., |x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
/// let odds = numbers;
|
||||
///
|
||||
/// assert_eq!(evens, vec![2, 4, 6, 8, 14]);
|
||||
/// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]);
|
||||
/// ```
|
||||
///
|
||||
/// Using the range argument to only process a part of the vector:
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(extract_if)]
|
||||
/// let mut items = vec![0, 0, 0, 0, 0, 0, 0, 1, 2, 1, 2, 1, 2];
|
||||
/// let ones = items.extract_if(7.., |x| *x == 1).collect::<Vec<_>>();
|
||||
/// assert_eq!(items, vec![0, 0, 0, 0, 0, 0, 0, 2, 2, 2]);
|
||||
/// assert_eq!(ones.len(), 3);
|
||||
/// ```
|
||||
#[unstable(feature = "extract_if", reason = "recently added", issue = "43244")]
|
||||
pub fn extract_if<F>(&mut self, filter: F) -> ExtractIf<'_, T, F, A>
|
||||
pub fn extract_if<F, R>(&mut self, range: R, filter: F) -> ExtractIf<'_, T, F, A>
|
||||
where
|
||||
F: FnMut(&mut T) -> bool,
|
||||
R: RangeBounds<usize>,
|
||||
{
|
||||
let old_len = self.len();
|
||||
|
||||
// Guard against us getting leaked (leak amplification)
|
||||
unsafe {
|
||||
self.set_len(0);
|
||||
}
|
||||
|
||||
ExtractIf { vec: self, idx: 0, del: 0, old_len, pred: filter }
|
||||
ExtractIf::new(self, filter, range)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
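The signature change above is what ripples through every call site earlier in this diff: `extract_if` gains a leading `RangeBounds<usize>` parameter before the predicate, and passing `..` preserves the previous whole-vector behaviour. A minimal usage sketch, assuming a nightly toolchain with the `extract_if` feature (the example values are illustrative, not taken from the commit):

```rust
#![feature(extract_if)] // unstable; gate name taken from the diff above

fn main() {
    let mut numbers = vec![1, 2, 3, 4, 5, 6];

    // Existing call sites migrate by inserting `..` as the range argument.
    let evens: Vec<_> = numbers.extract_if(.., |x| *x % 2 == 0).collect();
    assert_eq!(evens, [2, 4, 6]);
    assert_eq!(numbers, [1, 3, 5]);

    // A bounded range limits which elements the predicate is ever asked about.
    let tail: Vec<_> = numbers.extract_if(1.., |_| true).collect();
    assert_eq!(tail, [3, 5]);
    assert_eq!(numbers, [1]);
}
```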
@ -1414,7 +1414,7 @@ fn extract_if_empty() {
|
|||
let mut vec: Vec<i32> = vec![];
|
||||
|
||||
{
|
||||
let mut iter = vec.extract_if(|_| true);
|
||||
let mut iter = vec.extract_if(.., |_| true);
|
||||
assert_eq!(iter.size_hint(), (0, Some(0)));
|
||||
assert_eq!(iter.next(), None);
|
||||
assert_eq!(iter.size_hint(), (0, Some(0)));
|
||||
|
@ -1431,7 +1431,7 @@ fn extract_if_zst() {
|
|||
let initial_len = vec.len();
|
||||
let mut count = 0;
|
||||
{
|
||||
let mut iter = vec.extract_if(|_| true);
|
||||
let mut iter = vec.extract_if(.., |_| true);
|
||||
assert_eq!(iter.size_hint(), (0, Some(initial_len)));
|
||||
while let Some(_) = iter.next() {
|
||||
count += 1;
|
||||
|
@ -1454,7 +1454,7 @@ fn extract_if_false() {
|
|||
let initial_len = vec.len();
|
||||
let mut count = 0;
|
||||
{
|
||||
let mut iter = vec.extract_if(|_| false);
|
||||
let mut iter = vec.extract_if(.., |_| false);
|
||||
assert_eq!(iter.size_hint(), (0, Some(initial_len)));
|
||||
for _ in iter.by_ref() {
|
||||
count += 1;
|
||||
|
@ -1476,7 +1476,7 @@ fn extract_if_true() {
|
|||
let initial_len = vec.len();
|
||||
let mut count = 0;
|
||||
{
|
||||
let mut iter = vec.extract_if(|_| true);
|
||||
let mut iter = vec.extract_if(.., |_| true);
|
||||
assert_eq!(iter.size_hint(), (0, Some(initial_len)));
|
||||
while let Some(_) = iter.next() {
|
||||
count += 1;
|
||||
|
@ -1492,6 +1492,31 @@ fn extract_if_true() {
|
|||
assert_eq!(vec, vec![]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extract_if_ranges() {
|
||||
let mut vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||
|
||||
let mut count = 0;
|
||||
let it = vec.extract_if(1..=3, |_| {
|
||||
count += 1;
|
||||
true
|
||||
});
|
||||
assert_eq!(it.collect::<Vec<_>>(), vec![1, 2, 3]);
|
||||
assert_eq!(vec, vec![0, 4, 5, 6, 7, 8, 9, 10]);
|
||||
assert_eq!(count, 3);
|
||||
|
||||
let it = vec.extract_if(1..=3, |_| false);
|
||||
assert_eq!(it.collect::<Vec<_>>(), vec![]);
|
||||
assert_eq!(vec, vec![0, 4, 5, 6, 7, 8, 9, 10]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn extract_if_out_of_bounds() {
|
||||
let mut vec = vec![0, 1];
|
||||
let _ = vec.extract_if(5.., |_| true).for_each(drop);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn extract_if_complex() {
|
||||
{
|
||||
|
@ -1501,7 +1526,7 @@ fn extract_if_complex() {
|
|||
39,
|
||||
];
|
||||
|
||||
let removed = vec.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
let removed = vec.extract_if(.., |x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
assert_eq!(removed.len(), 10);
|
||||
assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
|
||||
|
||||
|
@ -1515,7 +1540,7 @@ fn extract_if_complex() {
|
|||
2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39,
|
||||
];
|
||||
|
||||
let removed = vec.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
let removed = vec.extract_if(.., |x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
assert_eq!(removed.len(), 10);
|
||||
assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
|
||||
|
||||
|
@ -1528,7 +1553,7 @@ fn extract_if_complex() {
|
|||
let mut vec =
|
||||
vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36];
|
||||
|
||||
let removed = vec.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
let removed = vec.extract_if(.., |x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
assert_eq!(removed.len(), 10);
|
||||
assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
|
||||
|
||||
|
@ -1540,7 +1565,7 @@ fn extract_if_complex() {
|
|||
// [xxxxxxxxxx+++++++++++]
|
||||
let mut vec = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19];
|
||||
|
||||
let removed = vec.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
let removed = vec.extract_if(.., |x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
assert_eq!(removed.len(), 10);
|
||||
assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
|
||||
|
||||
|
@ -1552,7 +1577,7 @@ fn extract_if_complex() {
|
|||
// [+++++++++++xxxxxxxxxx]
|
||||
let mut vec = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20];
|
||||
|
||||
let removed = vec.extract_if(|x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
let removed = vec.extract_if(.., |x| *x % 2 == 0).collect::<Vec<_>>();
|
||||
assert_eq!(removed.len(), 10);
|
||||
assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
|
||||
|
||||
|
@ -1600,7 +1625,7 @@ fn extract_if_consumed_panic() {
|
|||
}
|
||||
c.index < 6
|
||||
};
|
||||
let drain = data.extract_if(filter);
|
||||
let drain = data.extract_if(.., filter);
|
||||
|
||||
// NOTE: The ExtractIf is explicitly consumed
|
||||
drain.for_each(drop);
|
||||
|
@ -1653,7 +1678,7 @@ fn extract_if_unconsumed_panic() {
|
|||
}
|
||||
c.index < 6
|
||||
};
|
||||
let _drain = data.extract_if(filter);
|
||||
let _drain = data.extract_if(.., filter);
|
||||
|
||||
// NOTE: The ExtractIf is dropped without being consumed
|
||||
});
|
||||
|
@ -1669,7 +1694,7 @@ fn extract_if_unconsumed_panic() {
|
|||
#[test]
|
||||
fn extract_if_unconsumed() {
|
||||
let mut vec = vec![1, 2, 3, 4];
|
||||
let drain = vec.extract_if(|&mut x| x % 2 != 0);
|
||||
let drain = vec.extract_if(.., |&mut x| x % 2 != 0);
|
||||
drop(drain);
|
||||
assert_eq!(vec, [1, 2, 3, 4]);
|
||||
}
|
||||
|
|
|
@ -4,7 +4,7 @@ macro_rules! uint_impl {
|
|||
ActualT = $ActualT:ident,
|
||||
SignedT = $SignedT:ident,
|
||||
|
||||
// There are all for use *only* in doc comments.
|
||||
// These are all for use *only* in doc comments.
|
||||
// As such, they're all passed as literals -- passing them as a string
|
||||
// literal is fine if they need to be multiple code tokens.
|
||||
// In non-comments, use the associated constants rather than these.
|
||||
|
|
|
@ -245,6 +245,11 @@ v(
|
|||
"target.mips64el-unknown-linux-muslabi64.musl-root",
|
||||
"mips64el-unknown-linux-muslabi64 install directory",
|
||||
)
|
||||
v(
|
||||
"musl-root-powerpc64le",
|
||||
"target.powerpc64le-unknown-linux-musl.musl-root",
|
||||
"powerpc64le-unknown-linux-musl install directory",
|
||||
)
|
||||
v(
|
||||
"musl-root-riscv32gc",
|
||||
"target.riscv32gc-unknown-linux-musl.musl-root",
|
||||
|
|
|
@ -217,6 +217,7 @@ pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool {
|
|||
("powerpc-unknown-linux-gnu", false),
|
||||
("powerpc64-unknown-linux-gnu", false),
|
||||
("powerpc64le-unknown-linux-gnu", false),
|
||||
("powerpc64le-unknown-linux-musl", false),
|
||||
("riscv64gc-unknown-linux-gnu", false),
|
||||
("s390x-unknown-linux-gnu", false),
|
||||
("x86_64-unknown-freebsd", false),
|
||||
|
|
|
@ -91,7 +91,7 @@ macro_rules! rustc {
|
|||
#[test]
|
||||
fn test_valid() {
|
||||
// make sure multi suite paths are accepted
|
||||
check_cli(["test", "tests/ui/attr-start.rs", "tests/ui/attr-shebang.rs"]);
|
||||
check_cli(["test", "tests/ui/bootstrap/self-test/a.rs", "tests/ui/bootstrap/self-test/b.rs"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -3,23 +3,46 @@ FROM ubuntu:22.04
|
|||
COPY scripts/cross-apt-packages.sh /scripts/
|
||||
RUN sh /scripts/cross-apt-packages.sh
|
||||
|
||||
COPY scripts/crosstool-ng-git.sh /scripts/
|
||||
RUN sh /scripts/crosstool-ng-git.sh
|
||||
|
||||
COPY scripts/rustbuild-setup.sh /scripts/
|
||||
RUN sh /scripts/rustbuild-setup.sh
|
||||
|
||||
WORKDIR /tmp
|
||||
|
||||
COPY scripts/crosstool-ng-build.sh /scripts/
|
||||
COPY host-x86_64/dist-powerpc64le-linux/powerpc64le-unknown-linux-musl.defconfig /tmp/crosstool.defconfig
|
||||
RUN /scripts/crosstool-ng-build.sh
|
||||
|
||||
WORKDIR /build
|
||||
|
||||
RUN apt-get install -y --no-install-recommends rpm2cpio cpio
|
||||
COPY host-x86_64/dist-powerpc64le-linux/shared.sh host-x86_64/dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /tmp/
|
||||
COPY host-x86_64/dist-powerpc64le-linux/shared.sh host-x86_64/dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /build/
|
||||
RUN ./build-powerpc64le-toolchain.sh
|
||||
|
||||
COPY scripts/sccache.sh /scripts/
|
||||
RUN sh /scripts/sccache.sh
|
||||
|
||||
ENV PATH=$PATH:/x-tools/powerpc64le-unknown-linux-musl/bin
|
||||
|
||||
ENV \
|
||||
AR_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-ar \
|
||||
CC_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-gcc \
|
||||
CXX_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-g++
|
||||
CXX_powerpc64le_unknown_linux_gnu=powerpc64le-linux-gnu-g++ \
|
||||
AR_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-ar \
|
||||
CC_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-gcc \
|
||||
CXX_powerpc64le_unknown_linux_musl=powerpc64le-unknown-linux-musl-g++
|
||||
|
||||
ENV HOSTS=powerpc64le-unknown-linux-gnu
|
||||
ENV HOSTS=powerpc64le-unknown-linux-gnu,powerpc64le-unknown-linux-musl
|
||||
|
||||
ENV RUST_CONFIGURE_ARGS \
|
||||
--enable-extended \
|
||||
--enable-full-tools \
|
||||
--enable-profiler \
|
||||
--enable-sanitizers \
|
||||
--disable-docs \
|
||||
--set target.powerpc64le-unknown-linux-musl.crt-static=false \
|
||||
--musl-root-powerpc64le=/x-tools/powerpc64le-unknown-linux-musl/powerpc64le-unknown-linux-musl/sysroot/usr
|
||||
|
||||
ENV RUST_CONFIGURE_ARGS --enable-extended --enable-profiler --disable-docs
|
||||
ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
CT_CONFIG_VERSION="4"
|
||||
CT_EXPERIMENTAL=y
|
||||
CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
|
||||
CT_USE_MIRROR=y
|
||||
CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc"
|
||||
CT_ARCH_POWERPC=y
|
||||
CT_ARCH_LE=y
|
||||
CT_ARCH_64=y
|
||||
# CT_DEMULTILIB is not set
|
||||
CT_ARCH_ARCH="powerpc64le"
|
||||
CT_KERNEL_LINUX=y
|
||||
CT_LINUX_V_4_19=y
|
||||
CT_LIBC_MUSL=y
|
||||
CT_MUSL_V_1_2_3=y
|
||||
CT_CC_LANG_CXX=y
|
||||
CT_GETTEXT_NEEDED=y
|
|
@ -190,7 +190,7 @@ auto:
|
|||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-powerpc64le-linux
|
||||
<<: *job-linux-4c
|
||||
<<: *job-linux-4c-largedisk
|
||||
|
||||
- image: dist-riscv64-linux
|
||||
<<: *job-linux-4c
|
||||
|
|
|
@ -66,6 +66,7 @@
|
|||
- [powerpc-unknown-openbsd](platform-support/powerpc-unknown-openbsd.md)
|
||||
- [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md)
|
||||
- [powerpc64-ibm-aix](platform-support/aix.md)
|
||||
- [powerpc64le-unknown-linux-musl](platform-support/powerpc64le-unknown-linux-musl.md)
|
||||
- [riscv32e*-unknown-none-elf](platform-support/riscv32e-unknown-none-elf.md)
|
||||
- [riscv32i*-unknown-none-elf](platform-support/riscv32-unknown-none-elf.md)
|
||||
- [riscv32im-risc0-zkvm-elf](platform-support/riscv32im-risc0-zkvm-elf.md)
|
||||
|
|
|
@ -97,6 +97,7 @@ target | notes
|
|||
`powerpc-unknown-linux-gnu` | PowerPC Linux (kernel 3.2, glibc 2.17)
|
||||
`powerpc64-unknown-linux-gnu` | PPC64 Linux (kernel 3.2, glibc 2.17)
|
||||
`powerpc64le-unknown-linux-gnu` | PPC64LE Linux (kernel 3.10, glibc 2.17)
|
||||
[`powerpc64le-unknown-linux-musl`](platform-support/powerpc64le-unknown-linux-musl.md) | PPC64LE Linux (kernel 4.19, musl 1.2.3)
|
||||
[`riscv64gc-unknown-linux-gnu`](platform-support/riscv64gc-unknown-linux-gnu.md) | RISC-V Linux (kernel 4.20, glibc 2.29)
|
||||
[`riscv64gc-unknown-linux-musl`](platform-support/riscv64gc-unknown-linux-musl.md) | RISC-V Linux (kernel 4.20, musl 1.2.3)
|
||||
[`s390x-unknown-linux-gnu`](platform-support/s390x-unknown-linux-gnu.md) | S390x Linux (kernel 3.2, glibc 2.17)
|
||||
|
@ -348,7 +349,6 @@ target | std | host | notes
|
|||
`powerpc-unknown-freebsd` | ? | | PowerPC FreeBSD
|
||||
`powerpc64-unknown-linux-musl` | ? | | 64-bit PowerPC Linux with musl 1.2.3
|
||||
[`powerpc64-wrs-vxworks`](platform-support/vxworks.md) | ✓ | |
|
||||
`powerpc64le-unknown-linux-musl` | ? | | 64-bit PowerPC Linux with musl 1.2.3, Little Endian
|
||||
[`powerpc64-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | OpenBSD/powerpc64
|
||||
[`powerpc64-ibm-aix`](platform-support/aix.md) | ? | | 64-bit AIX (7.2 and newer)
|
||||
`riscv32gc-unknown-linux-gnu` | ✓ | | RISC-V Linux (kernel 5.4, glibc 2.33)
|
||||
|
|
|
@ -0,0 +1,48 @@
|
|||
# powerpc64le-unknown-linux-musl
|
||||
|
||||
**Tier: 2**
|
||||
|
||||
Target for 64-bit little endian PowerPC Linux programs using musl libc.
|
||||
|
||||
## Target maintainers
|
||||
|
||||
- [@Gelbpunkt](https://github.com/Gelbpunkt)
|
||||
- [@famfo](https://github.com/famfo)
|
||||
- [@neuschaefer](https://github.com/neuschaefer)
|
||||
|
||||
## Requirements
|
||||
|
||||
Building the target itself requires a 64-bit little endian PowerPC compiler that is supported by `cc-rs`.
|
||||
|
||||
## Building the target
|
||||
|
||||
The target can be built by enabling it for a `rustc` build.
|
||||
|
||||
```toml
|
||||
[build]
|
||||
target = ["powerpc64le-unknown-linux-musl"]
|
||||
```
|
||||
|
||||
Make sure your C compiler is included in `$PATH`, then add it to the `config.toml`:
|
||||
|
||||
```toml
|
||||
[target.powerpc64le-unknown-linux-musl]
|
||||
cc = "powerpc64le-linux-musl-gcc"
|
||||
cxx = "powerpc64le-linux-musl-g++"
|
||||
ar = "powerpc64le-linux-musl-ar"
|
||||
linker = "powerpc64le-linux-musl-gcc"
|
||||
```
|
||||
|
||||
## Building Rust programs
|
||||
|
||||
This target is distributed through `rustup`, and otherwise requires no
|
||||
special configuration.
|
||||
|
||||
## Cross-compilation
|
||||
|
||||
This target can be cross-compiled from any host.
|
||||
|
||||
## Testing
|
||||
|
||||
This target can be tested as normal with `x.py` on a 64-bit little endian
|
||||
PowerPC host or via QEMU emulation.
|
|
@ -37,6 +37,7 @@ static HOSTS: &[&str] = &[
|
|||
"powerpc-unknown-linux-gnu",
|
||||
"powerpc64-unknown-linux-gnu",
|
||||
"powerpc64le-unknown-linux-gnu",
|
||||
"powerpc64le-unknown-linux-musl",
|
||||
"riscv64gc-unknown-linux-gnu",
|
||||
"s390x-unknown-linux-gnu",
|
||||
"x86_64-apple-darwin",
|
||||
|
@ -131,6 +132,7 @@ static TARGETS: &[&str] = &[
|
|||
"powerpc-unknown-linux-gnu",
|
||||
"powerpc64-unknown-linux-gnu",
|
||||
"powerpc64le-unknown-linux-gnu",
|
||||
"powerpc64le-unknown-linux-musl",
|
||||
"riscv32i-unknown-none-elf",
|
||||
"riscv32im-risc0-zkvm-elf",
|
||||
"riscv32im-unknown-none-elf",
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
Function name: unused_mod::main
|
||||
Raw bytes (9): 0x[01, 02, 00, 01, 01, 04, 01, 02, 02]
|
||||
Raw bytes (9): 0x[01, 01, 00, 01, 01, 04, 01, 02, 02]
|
||||
Number of files: 1
|
||||
- file 0 => global file 2
|
||||
- file 0 => global file 1
|
||||
Number of expressions: 0
|
||||
Number of file 0 mappings: 1
|
||||
- Code(Counter(0)) at (prev + 4, 1) to (start + 2, 2)
|
||||
Highest counter ID seen: c0
|
||||
|
||||
Function name: unused_mod::unused_module::never_called_function (unused)
|
||||
Raw bytes (9): 0x[01, 01, 00, 01, 00, 02, 01, 02, 02]
|
||||
Raw bytes (9): 0x[01, 02, 00, 01, 00, 02, 01, 02, 02]
|
||||
Number of files: 1
|
||||
- file 0 => global file 1
|
||||
- file 0 => global file 2
|
||||
Number of expressions: 0
|
||||
Number of file 0 mappings: 1
|
||||
- Code(Zero) at (prev + 2, 1) to (start + 2, 2)
|
||||
|
|
2
tests/ui/bootstrap/self-test/a.rs
Normal file
|
@ -0,0 +1,2 @@
|
|||
//! Not used by compiler, this is used by bootstrap cli self-test.
|
||||
//@ ignore-test
|
2
tests/ui/bootstrap/self-test/b.rs
Normal file
|
@ -0,0 +1,2 @@
|
|||
//! Not used by compiler, used by bootstrap cli self-test.
|
||||
//@ ignore-test
|
24
tests/ui/impl-trait/rpit/inherits-lifetime.rs
Normal file
|
@ -0,0 +1,24 @@
|
|||
//! Check that lifetimes are inherited in RPIT.
|
||||
//! Previously, the hidden lifetime of T::Bar would be overlooked
|
||||
//! and would instead end up as <T as Foo<'static>>::Bar.
|
||||
//!
|
||||
//! Regression test for <https://github.com/rust-lang/rust/issues/51525>.
|
||||
|
||||
//@ check-pass
|
||||
|
||||
trait Foo<'a> {
|
||||
type Bar;
|
||||
}
|
||||
|
||||
impl<'a> Foo<'a> for u32 {
|
||||
type Bar = &'a ();
|
||||
}
|
||||
|
||||
fn baz<'a, T>() -> impl IntoIterator<Item = T::Bar>
|
||||
where
|
||||
T: Foo<'a>,
|
||||
{
|
||||
None
|
||||
}
|
||||
|
||||
fn main() {}
|