Auto merge of #136000 - matthiaskrgr:rollup-j6ge32r, r=matthiaskrgr

Rollup of 6 pull requests

Successful merges:

- #135873 (coverage: Prepare for upcoming changes to counter creation)
- #135926 (Implement `needs-subprocess` directive, and cleanup a bunch of tests to use `needs-{subprocess,threads}`)
- #135950 (Tidy Python improvements)
- #135956 (Make `Vec::pop_if` a bit more presentable)
- #135966 ([AIX] Allow different sized load and store in `tests/assembly/powerpc64-struct-abi.rs`)
- #135983 (Doc difference between extend and extend_from_slice)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 1e9b0177da
233 changed files with 593 additions and 606 deletions
@@ -51,7 +51,7 @@ pub(crate) fn prepare_covfun_record<'tcx>(
is_used: bool,
) -> Option<CovfunRecord<'tcx>> {
let fn_cov_info = tcx.instance_mir(instance.def).function_coverage_info.as_deref()?;
let ids_info = tcx.coverage_ids_info(instance.def);
let ids_info = tcx.coverage_ids_info(instance.def)?;

let expressions = prepare_expressions(fn_cov_info, ids_info, is_used);

@@ -8,7 +8,6 @@ use rustc_codegen_ssa::traits::{
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_middle::mir::coverage::CoverageKind;
use rustc_middle::ty::Instance;
use rustc_middle::ty::layout::HasTyCtxt;
use tracing::{debug, instrument};

use crate::builder::Builder;

@@ -147,6 +146,10 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
debug!("function has a coverage statement but no coverage info");
return;
};
let Some(ids_info) = bx.tcx.coverage_ids_info(instance.def) else {
debug!("function has a coverage statement but no IDs info");
return;
};

// Mark the instance as used in this CGU, for coverage purposes.
// This includes functions that were not partitioned into this CGU,

@@ -162,8 +165,7 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
// be smaller than the number originally inserted by the instrumentor,
// if some high-numbered counters were removed by MIR optimizations.
// If so, LLVM's profiler runtime will use fewer physical counters.
let num_counters =
bx.tcx().coverage_ids_info(instance.def).num_counters_after_mir_opts();
let num_counters = ids_info.num_counters_after_mir_opts();
assert!(
num_counters as usize <= function_coverage_info.num_counters,
"num_counters disagreement: query says {num_counters} but function info only has {}",

@@ -4,7 +4,7 @@ use std::fmt::{self, Debug, Formatter};

use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
use rustc_macros::{HashStable, TyDecodable, TyEncodable};
use rustc_span::Span;

rustc_index::newtype_index! {

@@ -72,7 +72,7 @@ impl ConditionId {
/// Enum that can hold a constant zero value, the ID of an physical coverage
/// counter, or the ID of a coverage-counter expression.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub enum CovTerm {
Zero,
Counter(CounterId),

@@ -89,7 +89,7 @@ impl Debug for CovTerm {
}
}

#[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
pub enum CoverageKind {
/// Marks a span that might otherwise not be represented in MIR, so that
/// coverage instrumentation can associate it with its enclosing block/BCB.

@@ -151,7 +151,7 @@ impl Debug for CoverageKind {
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
#[derive(TyEncodable, TyDecodable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable)]
pub enum Op {
Subtract,
Add,

@@ -168,7 +168,7 @@ impl Op {
}

#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct Expression {
pub lhs: CovTerm,
pub op: Op,

@@ -176,7 +176,7 @@ pub struct Expression {
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub enum MappingKind {
/// Associates a normal region of code with a counter/expression/zero.
Code(CovTerm),

@@ -208,7 +208,7 @@ impl MappingKind {
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct Mapping {
pub kind: MappingKind,
pub span: Span,

@@ -218,7 +218,7 @@ pub struct Mapping {
/// to be used in conjunction with the individual coverage statements injected
/// into the function's basic blocks.
#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct FunctionCoverageInfo {
pub function_source_hash: u64,
pub body_span: Span,

@@ -238,7 +238,7 @@ pub struct FunctionCoverageInfo {
/// ("Hi" indicates that this is "high-level" information collected at the
/// THIR/MIR boundary, before the MIR-based coverage instrumentation pass.)
#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct CoverageInfoHi {
/// 1 more than the highest-numbered [`CoverageKind::BlockMarker`] that was
/// injected into the MIR body. This makes it possible to allocate per-ID

@@ -252,7 +252,7 @@ pub struct CoverageInfoHi {
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct BranchSpan {
pub span: Span,
pub true_marker: BlockMarkerId,

@@ -260,7 +260,7 @@ pub struct BranchSpan {
}

#[derive(Copy, Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct ConditionInfo {
pub condition_id: ConditionId,
pub true_next_id: Option<ConditionId>,

@@ -268,7 +268,7 @@ pub struct ConditionInfo {
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct MCDCBranchSpan {
pub span: Span,
pub condition_info: ConditionInfo,

@@ -277,14 +277,14 @@ pub struct MCDCBranchSpan {
}

#[derive(Copy, Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct DecisionInfo {
pub bitmap_idx: u32,
pub num_conditions: u16,
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable)]
pub struct MCDCDecisionSpan {
pub span: Span,
pub end_markers: Vec<BlockMarkerId>,

@@ -358,6 +358,8 @@ pub struct Body<'tcx> {
///
/// Only present if coverage is enabled and this function is eligible.
/// Boxed to limit space overhead in non-coverage builds.
#[type_foldable(identity)]
#[type_visitable(ignore)]
pub coverage_info_hi: Option<Box<coverage::CoverageInfoHi>>,

/// Per-function coverage information added by the `InstrumentCoverage`

@@ -366,6 +368,8 @@ pub struct Body<'tcx> {
///
/// If `-Cinstrument-coverage` is not active, or if an individual function
/// is not eligible for coverage, then this should always be `None`.
#[type_foldable(identity)]
#[type_visitable(ignore)]
pub function_coverage_info: Option<Box<coverage::FunctionCoverageInfo>>,
}

@@ -417,7 +417,14 @@ pub enum StatementKind<'tcx> {
///
/// Interpreters and codegen backends that don't support coverage instrumentation
/// can usually treat this as a no-op.
Coverage(CoverageKind),
Coverage(
// Coverage statements are unlikely to ever contain type information in
// the foreseeable future, so excluding them from TypeFoldable/TypeVisitable
// avoids some unhelpful derive boilerplate.
#[type_foldable(identity)]
#[type_visitable(ignore)]
CoverageKind,
),

/// Denotes a call to an intrinsic that does not require an unwind path and always returns.
/// This avoids adding a new block and a terminator for simple intrinsics.

@@ -618,7 +618,9 @@ rustc_queries! {
/// Summarizes coverage IDs inserted by the `InstrumentCoverage` MIR pass
/// (for compiler option `-Cinstrument-coverage`), after MIR optimizations
/// have had a chance to potentially remove some of them.
query coverage_ids_info(key: ty::InstanceKind<'tcx>) -> &'tcx mir::coverage::CoverageIdsInfo {
///
/// Returns `None` for functions that were not instrumented.
query coverage_ids_info(key: ty::InstanceKind<'tcx>) -> Option<&'tcx mir::coverage::CoverageIdsInfo> {
desc { |tcx| "retrieving coverage IDs info from MIR for `{}`", tcx.def_path_str(key.def_id()) }
arena_cache
}

@@ -11,7 +11,9 @@ use rustc_middle::mir::coverage::{CounterId, CovTerm, Expression, ExpressionId,

use crate::coverage::counters::balanced_flow::BalancedFlowGraph;
use crate::coverage::counters::iter_nodes::IterNodes;
use crate::coverage::counters::node_flow::{CounterTerm, MergedNodeFlowGraph, NodeCounters};
use crate::coverage::counters::node_flow::{
CounterTerm, NodeCounters, make_node_counters, node_flow_data_for_balanced_graph,
};
use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};

mod balanced_flow;

@@ -27,12 +29,12 @@ pub(super) fn make_bcb_counters(
) -> CoverageCounters {
// Create the derived graphs that are necessary for subsequent steps.
let balanced_graph = BalancedFlowGraph::for_graph(graph, |n| !graph[n].is_out_summable);
let merged_graph = MergedNodeFlowGraph::for_balanced_graph(&balanced_graph);
let node_flow_data = node_flow_data_for_balanced_graph(&balanced_graph);

// Use those graphs to determine which nodes get physical counters, and how
// to compute the execution counts of other nodes from those counters.
let nodes = make_node_counter_priority_list(graph, balanced_graph);
let node_counters = merged_graph.make_node_counters(&nodes);
let priority_list = make_node_flow_priority_list(graph, balanced_graph);
let node_counters = make_node_counters(&node_flow_data, &priority_list);

// Convert the counters into a form suitable for embedding into MIR.
transcribe_counters(&node_counters, bcb_needs_counter)

@@ -40,7 +42,7 @@ pub(super) fn make_bcb_counters(

/// Arranges the nodes in `balanced_graph` into a list, such that earlier nodes
/// take priority in being given a counter expression instead of a physical counter.
fn make_node_counter_priority_list(
fn make_node_flow_priority_list(
graph: &CoverageGraph,
balanced_graph: BalancedFlowGraph<&CoverageGraph>,
) -> Vec<BasicCoverageBlock> {

@@ -81,11 +83,11 @@ fn transcribe_counters(
let mut new = CoverageCounters::with_num_bcbs(bcb_needs_counter.domain_size());

for bcb in bcb_needs_counter.iter() {
// Our counter-creation algorithm doesn't guarantee that a counter
// expression starts or ends with a positive term, so partition the
// Our counter-creation algorithm doesn't guarantee that a node's list
// of terms starts or ends with a positive term, so partition the
// counters into "positive" and "negative" lists for easier handling.
let (mut pos, mut neg): (Vec<_>, Vec<_>) =
old.counter_expr(bcb).iter().partition_map(|&CounterTerm { node, op }| match op {
old.counter_terms[bcb].iter().partition_map(|&CounterTerm { node, op }| match op {
Op::Add => Either::Left(node),
Op::Subtract => Either::Right(node),
});

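For readers unfamiliar with `partition_map`, here is a minimal standalone sketch of the positive/negative splitting pattern used in `transcribe_counters` above. It assumes the `itertools` crate and uses a simplified stand-in for `CounterTerm`; it is an illustration, not code from this commit:

```rust
use itertools::{Either, Itertools};

#[derive(Clone, Copy)]
enum Op {
    Add,
    Subtract,
}

fn main() {
    // Simplified stand-in for one node's list of counter terms: (node id, op).
    let terms = [(0usize, Op::Add), (1, Op::Subtract), (2, Op::Add)];

    // Split the terms into "positive" and "negative" node lists, mirroring
    // how transcribe_counters partitions a node's counter terms.
    let (pos, neg): (Vec<_>, Vec<_>) = terms.iter().partition_map(|&(node, op)| match op {
        Op::Add => Either::Left(node),
        Op::Subtract => Either::Right(node),
    });

    assert_eq!(pos, vec![0, 2]);
    assert_eq!(neg, vec![1]);
}
```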
@@ -8,18 +8,17 @@

use rustc_data_structures::graph;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{Idx, IndexVec};
use rustc_index::{Idx, IndexSlice, IndexVec};
use rustc_middle::mir::coverage::Op;
use smallvec::SmallVec;

use crate::coverage::counters::iter_nodes::IterNodes;
use crate::coverage::counters::union_find::{FrozenUnionFind, UnionFind};
use crate::coverage::counters::union_find::UnionFind;

#[cfg(test)]
mod tests;

/// View of some underlying graph, in which each node's successors have been
/// merged into a single "supernode".
/// Data representing a view of some underlying graph, in which each node's
/// successors have been merged into a single "supernode".
///
/// The resulting supernodes have no obvious meaning on their own.
/// However, merging successor nodes means that a node's out-edges can all

@@ -30,10 +29,10 @@ mod tests;
/// in the merged graph, it becomes possible to analyze the original node flows
/// using techniques for analyzing edge flows.
#[derive(Debug)]
pub(crate) struct MergedNodeFlowGraph<Node: Idx> {
pub(crate) struct NodeFlowData<Node: Idx> {
/// Maps each node to the supernode that contains it, indicated by some
/// arbitrary "root" node that is part of that supernode.
supernodes: FrozenUnionFind<Node>,
supernodes: IndexVec<Node, Node>,
/// For each node, stores the single supernode that all of its successors
/// have been merged into.
///

@@ -42,84 +41,71 @@ pub(crate) struct MergedNodeFlowGraph<Node: Idx> {
succ_supernodes: IndexVec<Node, Node>,
}

impl<Node: Idx> MergedNodeFlowGraph<Node> {
/// Creates a "merged" view of an underlying graph.
///
/// The given graph is assumed to have [“balanced flow”](balanced-flow),
/// though it does not necessarily have to be a `BalancedFlowGraph`.
///
/// [balanced-flow]: `crate::coverage::counters::balanced_flow::BalancedFlowGraph`.
pub(crate) fn for_balanced_graph<G>(graph: G) -> Self
where
G: graph::DirectedGraph<Node = Node> + graph::Successors,
{
let mut supernodes = UnionFind::<G::Node>::new(graph.num_nodes());
/// Creates a "merged" view of an underlying graph.
///
/// The given graph is assumed to have [“balanced flow”](balanced-flow),
/// though it does not necessarily have to be a `BalancedFlowGraph`.
///
/// [balanced-flow]: `crate::coverage::counters::balanced_flow::BalancedFlowGraph`.
pub(crate) fn node_flow_data_for_balanced_graph<G>(graph: G) -> NodeFlowData<G::Node>
where
G: graph::Successors,
{
let mut supernodes = UnionFind::<G::Node>::new(graph.num_nodes());

// For each node, merge its successors into a single supernode, and
// arbitrarily choose one of those successors to represent all of them.
let successors = graph
.iter_nodes()
.map(|node| {
graph
.successors(node)
.reduce(|a, b| supernodes.unify(a, b))
.expect("each node in a balanced graph must have at least one out-edge")
})
.collect::<IndexVec<G::Node, G::Node>>();
// For each node, merge its successors into a single supernode, and
// arbitrarily choose one of those successors to represent all of them.
let successors = graph
.iter_nodes()
.map(|node| {
graph
.successors(node)
.reduce(|a, b| supernodes.unify(a, b))
.expect("each node in a balanced graph must have at least one out-edge")
})
.collect::<IndexVec<G::Node, G::Node>>();

// Now that unification is complete, freeze the supernode forest,
// and resolve each arbitrarily-chosen successor to its canonical root.
// (This avoids having to explicitly resolve them later.)
let supernodes = supernodes.freeze();
let succ_supernodes = successors.into_iter().map(|succ| supernodes.find(succ)).collect();
// Now that unification is complete, take a snapshot of the supernode forest,
// and resolve each arbitrarily-chosen successor to its canonical root.
// (This avoids having to explicitly resolve them later.)
let supernodes = supernodes.snapshot();
let succ_supernodes = successors.into_iter().map(|succ| supernodes[succ]).collect();

Self { supernodes, succ_supernodes }
NodeFlowData { supernodes, succ_supernodes }
}

/// Uses the graph information in `node_flow_data`, together with a given
/// permutation of all nodes in the graph, to create physical counters and
/// counter expressions for each node in the underlying graph.
///
/// The given list must contain exactly one copy of each node in the
/// underlying balanced-flow graph. The order of nodes is used as a hint to
/// influence counter allocation:
/// - Earlier nodes are more likely to receive counter expressions.
/// - Later nodes are more likely to receive physical counters.
pub(crate) fn make_node_counters<Node: Idx>(
node_flow_data: &NodeFlowData<Node>,
priority_list: &[Node],
) -> NodeCounters<Node> {
let mut builder = SpantreeBuilder::new(node_flow_data);

for &node in priority_list {
builder.visit_node(node);
}

fn num_nodes(&self) -> usize {
self.succ_supernodes.len()
}

fn is_supernode(&self, node: Node) -> bool {
self.supernodes.find(node) == node
}

/// Using the information in this merged graph, together with a given
/// permutation of all nodes in the graph, to create physical counters and
/// counter expressions for each node in the underlying graph.
///
/// The given list must contain exactly one copy of each node in the
/// underlying balanced-flow graph. The order of nodes is used as a hint to
/// influence counter allocation:
/// - Earlier nodes are more likely to receive counter expressions.
/// - Later nodes are more likely to receive physical counters.
pub(crate) fn make_node_counters(&self, all_nodes_permutation: &[Node]) -> NodeCounters<Node> {
let mut builder = SpantreeBuilder::new(self);

for &node in all_nodes_permutation {
builder.visit_node(node);
}

NodeCounters { counter_exprs: builder.finish() }
}
NodeCounters { counter_terms: builder.finish() }
}

/// End result of allocating physical counters and counter expressions for the
/// nodes of a graph.
#[derive(Debug)]
pub(crate) struct NodeCounters<Node: Idx> {
counter_exprs: IndexVec<Node, CounterExprVec<Node>>,
}

impl<Node: Idx> NodeCounters<Node> {
/// For the given node, returns the finished list of terms that represent
/// its physical counter or counter expression. Always non-empty.
///
/// If a node was given a physical counter, its "expression" will contain
/// If a node was given a physical counter, the term list will contain
/// that counter as its sole element.
pub(crate) fn counter_expr(&self, this: Node) -> &[CounterTerm<Node>] {
self.counter_exprs[this].as_slice()
}
pub(crate) counter_terms: IndexVec<Node, Vec<CounterTerm<Node>>>,
}

#[derive(Debug)]

@@ -146,12 +132,11 @@ pub(crate) struct CounterTerm<Node> {
pub(crate) node: Node,
}

/// Stores the list of counter terms that make up a node's counter expression.
type CounterExprVec<Node> = SmallVec<[CounterTerm<Node>; 2]>;

#[derive(Debug)]
struct SpantreeBuilder<'a, Node: Idx> {
graph: &'a MergedNodeFlowGraph<Node>,
supernodes: &'a IndexSlice<Node, Node>,
succ_supernodes: &'a IndexSlice<Node, Node>,

is_unvisited: DenseBitSet<Node>,
/// Links supernodes to each other, gradually forming a spanning tree of
/// the merged-flow graph.

@@ -163,26 +148,32 @@ struct SpantreeBuilder<'a, Node: Idx> {
yank_buffer: Vec<Node>,
/// An in-progress counter expression for each node. Each expression is
/// initially empty, and will be filled in as relevant nodes are visited.
counter_exprs: IndexVec<Node, CounterExprVec<Node>>,
counter_terms: IndexVec<Node, Vec<CounterTerm<Node>>>,
}

impl<'a, Node: Idx> SpantreeBuilder<'a, Node> {
fn new(graph: &'a MergedNodeFlowGraph<Node>) -> Self {
let num_nodes = graph.num_nodes();
fn new(node_flow_data: &'a NodeFlowData<Node>) -> Self {
let NodeFlowData { supernodes, succ_supernodes } = node_flow_data;
let num_nodes = supernodes.len();
Self {
graph,
supernodes,
succ_supernodes,
is_unvisited: DenseBitSet::new_filled(num_nodes),
span_edges: IndexVec::from_fn_n(|_| None, num_nodes),
yank_buffer: vec![],
counter_exprs: IndexVec::from_fn_n(|_| SmallVec::new(), num_nodes),
counter_terms: IndexVec::from_fn_n(|_| vec![], num_nodes),
}
}

fn is_supernode(&self, node: Node) -> bool {
self.supernodes[node] == node
}

/// Given a supernode, finds the supernode that is the "root" of its
/// spantree component. Two nodes that have the same spantree root are
/// connected in the spantree.
fn spantree_root(&self, this: Node) -> Node {
debug_assert!(self.graph.is_supernode(this));
debug_assert!(self.is_supernode(this));

match self.span_edges[this] {
None => this,

@@ -193,7 +184,7 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> {
/// Rotates edges in the spantree so that `this` is the root of its
/// spantree component.
fn yank_to_spantree_root(&mut self, this: Node) {
debug_assert!(self.graph.is_supernode(this));
debug_assert!(self.is_supernode(this));

// The rotation is done iteratively, by first traversing from `this` to
// its root and storing the path in a buffer, and then traversing the

@@ -235,12 +226,12 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> {

// Get the supernode containing `this`, and make it the root of its
// component of the spantree.
let this_supernode = self.graph.supernodes.find(this);
let this_supernode = self.supernodes[this];
self.yank_to_spantree_root(this_supernode);

// Get the supernode containing all of this's successors.
let succ_supernode = self.graph.succ_supernodes[this];
debug_assert!(self.graph.is_supernode(succ_supernode));
let succ_supernode = self.succ_supernodes[this];
debug_assert!(self.is_supernode(succ_supernode));

// If two supernodes are already connected in the spantree, they will
// have the same spantree root. (Each supernode is connected to itself.)

@@ -268,8 +259,8 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> {
// `this_supernode`.

// Instead of setting `this.measure = true` as in the original paper,
// we just add the node's ID to its own "expression".
self.counter_exprs[this].push(CounterTerm { node: this, op: Op::Add });
// we just add the node's ID to its own list of terms.
self.counter_terms[this].push(CounterTerm { node: this, op: Op::Add });

// Walk the spantree from `this.successor` back to `this`. For each
// spantree edge along the way, add this node's physical counter to

@@ -279,7 +270,7 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> {
let &SpantreeEdge { is_reversed, claiming_node, span_parent } =
self.span_edges[curr].as_ref().unwrap();
let op = if is_reversed { Op::Subtract } else { Op::Add };
self.counter_exprs[claiming_node].push(CounterTerm { node: this, op });
self.counter_terms[claiming_node].push(CounterTerm { node: this, op });

curr = span_parent;
}

@@ -288,19 +279,20 @@ impl<'a, Node: Idx> SpantreeBuilder<'a, Node> {

/// Asserts that all nodes have been visited, and returns the computed
/// counter expressions (made up of physical counters) for each node.
fn finish(self) -> IndexVec<Node, CounterExprVec<Node>> {
let Self { graph, is_unvisited, span_edges, yank_buffer: _, counter_exprs } = self;
fn finish(self) -> IndexVec<Node, Vec<CounterTerm<Node>>> {
let Self { ref span_edges, ref is_unvisited, ref counter_terms, .. } = self;
assert!(is_unvisited.is_empty(), "some nodes were never visited: {is_unvisited:?}");
debug_assert!(
span_edges
.iter_enumerated()
.all(|(node, span_edge)| { span_edge.is_some() <= graph.is_supernode(node) }),
.all(|(node, span_edge)| { span_edge.is_some() <= self.is_supernode(node) }),
"only supernodes can have a span edge",
);
debug_assert!(
counter_exprs.iter().all(|expr| !expr.is_empty()),
"after visiting all nodes, every node should have a non-empty expression",
counter_terms.iter().all(|terms| !terms.is_empty()),
"after visiting all nodes, every node should have at least one term",
);
counter_exprs

self.counter_terms
}
}

@@ -4,10 +4,12 @@ use rustc_data_structures::graph::vec_graph::VecGraph;
use rustc_index::Idx;
use rustc_middle::mir::coverage::Op;

use super::{CounterTerm, MergedNodeFlowGraph, NodeCounters};
use crate::coverage::counters::node_flow::{
CounterTerm, NodeCounters, NodeFlowData, make_node_counters, node_flow_data_for_balanced_graph,
};

fn merged_node_flow_graph<G: graph::Successors>(graph: G) -> MergedNodeFlowGraph<G::Node> {
MergedNodeFlowGraph::for_balanced_graph(graph)
fn node_flow_data<G: graph::Successors>(graph: G) -> NodeFlowData<G::Node> {
node_flow_data_for_balanced_graph(graph)
}

fn make_graph<Node: Idx + Ord>(num_nodes: usize, edge_pairs: Vec<(Node, Node)>) -> VecGraph<Node> {

@@ -30,8 +32,8 @@ fn example_driver() {
(4, 0),
]);

let merged = merged_node_flow_graph(&graph);
let counters = merged.make_node_counters(&[3, 1, 2, 0, 4]);
let node_flow_data = node_flow_data(&graph);
let counters = make_node_counters(&node_flow_data, &[3, 1, 2, 0, 4]);

assert_eq!(format_counter_expressions(&counters), &[
// (comment to force vertical formatting for clarity)

@@ -53,12 +55,12 @@ fn format_counter_expressions<Node: Idx>(counters: &NodeCounters<Node>) -> Vec<S
};

counters
.counter_exprs
.counter_terms
.indices()
.map(|node| {
let mut expr = counters.counter_expr(node).iter().collect::<Vec<_>>();
expr.sort_by_key(|item| item.node.index());
format!("[{node:?}]: {}", expr.into_iter().map(format_item).join(" "))
let mut terms = counters.counter_terms[node].iter().collect::<Vec<_>>();
terms.sort_by_key(|item| item.node.index());
format!("[{node:?}]: {}", terms.into_iter().map(format_item).join(" "))
})
.collect()
}

@@ -88,29 +88,9 @@ impl<Key: Idx> UnionFind<Key> {
a
}

/// Creates a snapshot of this disjoint-set forest that can no longer be
/// mutated, but can be queried without mutation.
pub(crate) fn freeze(&mut self) -> FrozenUnionFind<Key> {
// Just resolve each key to its actual root.
let roots = self.table.indices().map(|key| self.find(key)).collect();
FrozenUnionFind { roots }
}
}

/// Snapshot of a disjoint-set forest that can no longer be mutated, but can be
/// queried in O(1) time without mutation.
///
/// This is really just a wrapper around a direct mapping from keys to roots,
/// but with a [`Self::find`] method that resembles [`UnionFind::find`].
#[derive(Debug)]
pub(crate) struct FrozenUnionFind<Key: Idx> {
roots: IndexVec<Key, Key>,
}

impl<Key: Idx> FrozenUnionFind<Key> {
/// Returns the "root" key of the disjoint-set containing the given key.
/// If two keys have the same root, they belong to the same set.
pub(crate) fn find(&self, key: Key) -> Key {
self.roots[key]
/// Takes a "snapshot" of the current state of this disjoint-set forest, in
/// the form of a vector that directly maps each key to its current root.
pub(crate) fn snapshot(&mut self) -> IndexVec<Key, Key> {
self.table.indices().map(|key| self.find(key)).collect()
}
}

@@ -180,7 +180,12 @@ fn create_mappings(
));

for (decision, branches) in mcdc_mappings {
let num_conditions = branches.len() as u16;
// FIXME(#134497): Previously it was possible for some of these branch
// conversions to fail, in which case the remaining branches in the
// decision would be degraded to plain `MappingKind::Branch`.
// The changes in #134497 made that failure impossible, because the
// fallible step was deferred to codegen. But the corresponding code
// in codegen wasn't updated to detect the need for a degrade step.
let conditions = branches
.into_iter()
.map(

@@ -206,24 +211,13 @@ fn create_mappings(
)
.collect::<Vec<_>>();

if conditions.len() == num_conditions as usize {
// LLVM requires end index for counter mapping regions.
let kind = MappingKind::MCDCDecision(DecisionInfo {
bitmap_idx: (decision.bitmap_idx + decision.num_test_vectors) as u32,
num_conditions,
});
let span = decision.span;
mappings.extend(std::iter::once(Mapping { kind, span }).chain(conditions.into_iter()));
} else {
mappings.extend(conditions.into_iter().map(|mapping| {
let MappingKind::MCDCBranch { true_term, false_term, mcdc_params: _ } =
mapping.kind
else {
unreachable!("all mappings here are MCDCBranch as shown above");
};
Mapping { kind: MappingKind::Branch { true_term, false_term }, span: mapping.span }
}))
}
// LLVM requires end index for counter mapping regions.
let kind = MappingKind::MCDCDecision(DecisionInfo {
bitmap_idx: (decision.bitmap_idx + decision.num_test_vectors) as u32,
num_conditions: u16::try_from(conditions.len()).unwrap(),
});
let span = decision.span;
mappings.extend(std::iter::once(Mapping { kind, span }).chain(conditions.into_iter()));
}

mappings

@@ -87,15 +87,9 @@ fn coverage_attr_on(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
fn coverage_ids_info<'tcx>(
tcx: TyCtxt<'tcx>,
instance_def: ty::InstanceKind<'tcx>,
) -> CoverageIdsInfo {
) -> Option<CoverageIdsInfo> {
let mir_body = tcx.instance_mir(instance_def);

let Some(fn_cov_info) = mir_body.function_coverage_info.as_deref() else {
return CoverageIdsInfo {
counters_seen: DenseBitSet::new_empty(0),
zero_expressions: DenseBitSet::new_empty(0),
};
};
let fn_cov_info = mir_body.function_coverage_info.as_deref()?;

let mut counters_seen = DenseBitSet::new_empty(fn_cov_info.num_counters);
let mut expressions_seen = DenseBitSet::new_filled(fn_cov_info.expressions.len());

@@ -129,7 +123,7 @@ fn coverage_ids_info<'tcx>(
let zero_expressions =
identify_zero_expressions(fn_cov_info, &counters_seen, &expressions_seen);

CoverageIdsInfo { counters_seen, zero_expressions }
Some(CoverageIdsInfo { counters_seen, zero_expressions })
}

fn all_coverage_in_mir_body<'a, 'tcx>(

@@ -2511,9 +2511,9 @@ impl<T, A: Allocator> Vec<T, A> {
}
}

/// Removes and returns the last element in a vector if the predicate
/// Removes and returns the last element from a vector if the predicate
/// returns `true`, or [`None`] if the predicate returns false or the vector
/// is empty.
/// is empty (the predicate will not be called in that case).
///
/// # Examples
///

@@ -2528,12 +2528,9 @@ impl<T, A: Allocator> Vec<T, A> {
/// assert_eq!(vec.pop_if(pred), None);
/// ```
#[unstable(feature = "vec_pop_if", issue = "122741")]
pub fn pop_if<F>(&mut self, f: F) -> Option<T>
where
F: FnOnce(&mut T) -> bool,
{
pub fn pop_if(&mut self, predicate: impl FnOnce(&mut T) -> bool) -> Option<T> {
let last = self.last_mut()?;
if f(last) { self.pop() } else { None }
if predicate(last) { self.pop() } else { None }
}

/// Moves all the elements of `other` into `self`, leaving `other` empty.

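A minimal usage sketch of the `pop_if` signature shown in the hunk above; this assumes a nightly toolchain, since `vec_pop_if` is still unstable (tracking issue #122741, per the attribute above):

```rust
#![feature(vec_pop_if)]

fn main() {
    let mut vec = vec![1, 2, 3, 4];

    // The predicate receives a mutable reference to the last element and
    // decides whether it should be popped.
    assert_eq!(vec.pop_if(|x| *x % 2 == 0), Some(4));

    // Predicate returns false (or the vector is empty): nothing is removed
    // and `None` is returned.
    assert_eq!(vec.pop_if(|x| *x > 100), None);
    assert_eq!(vec, [1, 2, 3]);
}
```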
@@ -3016,10 +3013,9 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
/// Iterates over the slice `other`, clones each element, and then appends
/// it to this `Vec`. The `other` slice is traversed in-order.
///
/// Note that this function is same as [`extend`] except that it is
/// specialized to work with slices instead. If and when Rust gets
/// specialization this function will likely be deprecated (but still
/// available).
/// Note that this function is the same as [`extend`],
/// except that it also works with slice elements that are Clone but not Copy.
/// If Rust gets specialization this function may be deprecated.
///
/// # Examples
///

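A small stable-Rust sketch of the distinction drawn by the new wording: `extend_from_slice` clones out of a slice whose elements are `Clone` but not necessarily `Copy`, while `extend` accepts any iterator:

```rust
fn main() {
    // `String` is Clone but not Copy, so a slice of Strings works with
    // extend_from_slice.
    let mut v = vec!["a".to_string()];
    let extra = ["b".to_string(), "c".to_string()];
    v.extend_from_slice(&extra);

    // `extend` takes anything iterable; here, an iterator of owned Strings.
    v.extend(extra.iter().map(|s| s.to_uppercase()));

    assert_eq!(v, ["a", "b", "c", "B", "C"]);
}
```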
@@ -1,4 +1,4 @@
This file offers some tips on the coding conventions for rustc. This
This file offers some tips on the coding conventions for rustc. This
chapter covers [formatting](#formatting), [coding for correctness](#cc),
[using crates from crates.io](#cio), and some tips on
[structuring your PR for easy review](#er).

@@ -25,6 +25,7 @@ pass the <!-- date-check: nov 2022 --> `--edition=2021` argument yourself when c
`rustfmt` directly.

[fmt]: https://github.com/rust-dev-tools/fmt-rfcs

[`rustfmt`]:https://github.com/rust-lang/rustfmt

## Formatting C++ code

@@ -40,6 +41,26 @@ When modifying that code, use this command to format it:
This uses a pinned version of `clang-format`, to avoid relying on the local
environment.

## Formatting and linting Python code

The Rust repository contains quite a lot of Python code. We try to keep
it both linted and formatted by the [ruff][ruff] tool.

When modifying Python code, use this command to format it:
```sh
./x test tidy --extra-checks=py:fmt --bless
```

and the following command to run lints:
```sh
./x test tidy --extra-checks=py:lint
```

This uses a pinned version of `ruff`, to avoid relying on the local
environment.

[ruff]: https://github.com/astral-sh/ruff

<a id="copyright"></a>

<!-- REUSE-IgnoreStart -->

@@ -84,7 +105,7 @@ Using `_` in a match is convenient, but it means that when new
variants are added to the enum, they may not get handled correctly.
Ask yourself: if a new variant were added to this enum, what's the
chance that it would want to use the `_` code, versus having some
other treatment? Unless the answer is "low", then prefer an
other treatment? Unless the answer is "low", then prefer an
exhaustive match. (The same advice applies to `if let` and `while
let`, which are effectively tests for a single variant.)

@@ -124,7 +145,7 @@ See the [crates.io dependencies][crates] section.
# How to structure your PR

How you prepare the commits in your PR can make a big difference for the
reviewer. Here are some tips.
reviewer. Here are some tips.

**Isolate "pure refactorings" into their own commit.** For example, if
you rename a method, then put that rename into its own commit, along

@@ -165,4 +186,5 @@ to the compiler.
crate-related, often the spelling is changed to `krate`.

[tcx]: ./ty.md

[crates]: ./crates-io.md

@@ -94,7 +94,7 @@ for more details.
| Directive | Explanation | Supported test suites | Possible values |
|-----------------------------------|--------------------------------------------------------------------------------------------------------------------------|----------------------------------------------|-----------------------------------------------------------------------------------------|
| `check-run-results` | Check run test binary `run-{pass,fail}` output snapshot | `ui`, `crashes`, `incremental` if `run-pass` | N/A |
| `error-pattern` | Check that output contains a specific string | `ui`, `crashes`, `incremental` if `run-pass` | String |
| `error-pattern` | Check that output contains a specific string | `ui`, `crashes`, `incremental` if `run-pass` | String |
| `regex-error-pattern` | Check that output contains a regex pattern | `ui`, `crashes`, `incremental` if `run-pass` | Regex |
| `check-stdout` | Check `stdout` against `error-pattern`s from running test binary[^check_stdout] | `ui`, `crashes`, `incremental` | N/A |
| `normalize-stderr-32bit` | Normalize actual stderr (for 32-bit platforms) with a rule `"<raw>" -> "<normalized>"` before comparing against snapshot | `ui`, `incremental` | `"<RAW>" -> "<NORMALIZED>"`, `<RAW>`/`<NORMALIZED>` is regex capture and replace syntax |

@@ -176,6 +176,7 @@ settings:
- `needs-rust-lld` — ignores if the rust lld support is not enabled (`rust.lld =
  true` in `config.toml`)
- `needs-threads` — ignores if the target does not have threading support
- `needs-subprocess` — ignores if the target does not have subprocess support
- `needs-symlink` — ignores if the target does not support symlinks. This can be
  the case on Windows if the developer did not enable privileged symlink
  permissions.

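As an illustration of the new directive in use (mirroring the test updates later in this commit), a hypothetical minimal test that must spawn a child process would declare:

```rust
//@ run-fail
//@ error-pattern:explicit panic
//@ needs-subprocess

fn main() {
    panic!();
}
```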
@@ -488,6 +488,17 @@ impl Config {
git_merge_commit_email: &self.git_merge_commit_email,
}
}

pub fn has_subprocess_support(&self) -> bool {
// FIXME(#135928): compiletest is always a **host** tool. Building and running an
// capability detection executable against the **target** is not trivial. The short term
// solution here is to hard-code some targets to allow/deny, unfortunately.

let unsupported_target = self.target_cfg().env == "sgx"
|| matches!(self.target_cfg().arch.as_str(), "wasm32" | "wasm64")
|| self.target_cfg().os == "emscripten";
!unsupported_target
}
}

/// Known widths of `target_has_atomic`.

@@ -152,6 +152,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[
"needs-sanitizer-support",
"needs-sanitizer-thread",
"needs-std-debug-assertions",
"needs-subprocess",
"needs-symlink",
"needs-target-has-atomic",
"needs-threads",

@@ -94,6 +94,11 @@ pub(super) fn handle_needs(
condition: config.has_threads(),
ignore_reason: "ignored on targets without threading support",
},
Need {
name: "needs-subprocess",
condition: config.has_subprocess_support(),
ignore_reason: "ignored on targets without subprocess support",
},
Need {
name: "needs-unwind",
condition: config.can_unwind(),

@@ -351,6 +356,9 @@ fn find_dlltool(config: &Config) -> bool {
dlltool_found
}

// FIXME(#135928): this is actually not quite right because this detection is run on the **host**.
// This however still helps the case of windows -> windows local development in case symlinks are
// not available.
#[cfg(windows)]
fn has_symlinks() -> bool {
if std::env::var_os("CI").is_some() {

@@ -89,74 +89,45 @@ fn check_impl(

if python_lint {
eprintln!("linting python files");
let mut cfg_args_ruff = cfg_args.clone();
let mut file_args_ruff = file_args.clone();

let mut cfg_path = root_path.to_owned();
cfg_path.extend(RUFF_CONFIG_PATH);
let mut cache_dir = outdir.to_owned();
cache_dir.extend(RUFF_CACHE_PATH);

cfg_args_ruff.extend([
"--config".as_ref(),
cfg_path.as_os_str(),
"--cache-dir".as_ref(),
cache_dir.as_os_str(),
]);

if file_args_ruff.is_empty() {
file_args_ruff.push(root_path.as_os_str());
}

let mut args = merge_args(&cfg_args_ruff, &file_args_ruff);
args.insert(0, "check".as_ref());
let res = py_runner(py_path.as_ref().unwrap(), true, None, "ruff", &args);
let py_path = py_path.as_ref().unwrap();
let res = run_ruff(root_path, outdir, py_path, &cfg_args, &file_args, &["check".as_ref()]);

if res.is_err() && show_diff {
eprintln!("\npython linting failed! Printing diff suggestions:");

args.insert(1, "--diff".as_ref());
let _ = py_runner(py_path.as_ref().unwrap(), true, None, "ruff", &args);
let _ = run_ruff(root_path, outdir, py_path, &cfg_args, &file_args, &[
"check".as_ref(),
"--diff".as_ref(),
]);
}
// Rethrow error
let _ = res?;
}

if python_fmt {
let mut cfg_args_ruff = cfg_args.clone();
let mut file_args_ruff = file_args.clone();

let mut args: Vec<&OsStr> = vec!["format".as_ref()];
if bless {
eprintln!("formatting python files");
} else {
eprintln!("checking python file formatting");
cfg_args_ruff.push("--check".as_ref());
args.push("--check".as_ref());
}

let mut cfg_path = root_path.to_owned();
cfg_path.extend(RUFF_CONFIG_PATH);
let mut cache_dir = outdir.to_owned();
cache_dir.extend(RUFF_CACHE_PATH);
let py_path = py_path.as_ref().unwrap();
let res = run_ruff(root_path, outdir, py_path, &cfg_args, &file_args, &args);

cfg_args_ruff.extend(["--config".as_ref(), cfg_path.as_os_str()]);

if file_args_ruff.is_empty() {
file_args_ruff.push(root_path.as_os_str());
}

let mut args = merge_args(&cfg_args_ruff, &file_args_ruff);
args.insert(0, "format".as_ref());
let res = py_runner(py_path.as_ref().unwrap(), true, None, "ruff", &args);

if res.is_err() && show_diff {
eprintln!("\npython formatting does not match! Printing diff:");

args.insert(0, "--diff".as_ref());
let _ = py_runner(py_path.as_ref().unwrap(), true, None, "ruff", &args);
}
if res.is_err() && !bless {
if show_diff {
eprintln!("\npython formatting does not match! Printing diff:");

let _ = run_ruff(root_path, outdir, py_path, &cfg_args, &file_args, &[
"format".as_ref(),
"--diff".as_ref(),
]);
}
eprintln!("rerun tidy with `--extra-checks=py:fmt --bless` to reformat Python code");
}

// Rethrow error
let _ = res?;
}

@@ -247,6 +218,38 @@ fn check_impl(
Ok(())
}

fn run_ruff(
root_path: &Path,
outdir: &Path,
py_path: &Path,
cfg_args: &[&OsStr],
file_args: &[&OsStr],
ruff_args: &[&OsStr],
) -> Result<(), Error> {
let mut cfg_args_ruff = cfg_args.into_iter().copied().collect::<Vec<_>>();
let mut file_args_ruff = file_args.into_iter().copied().collect::<Vec<_>>();

let mut cfg_path = root_path.to_owned();
cfg_path.extend(RUFF_CONFIG_PATH);
let mut cache_dir = outdir.to_owned();
cache_dir.extend(RUFF_CACHE_PATH);

cfg_args_ruff.extend([
"--config".as_ref(),
cfg_path.as_os_str(),
"--cache-dir".as_ref(),
cache_dir.as_os_str(),
]);

if file_args_ruff.is_empty() {
file_args_ruff.push(root_path.as_os_str());
}

let mut args: Vec<&OsStr> = ruff_args.into_iter().copied().collect();
args.extend(merge_args(&cfg_args_ruff, &file_args_ruff));
py_runner(py_path, true, None, "ruff", &args)
}

/// Helper to create `cfg1 cfg2 -- file1 file2` output
fn merge_args<'a>(cfg_args: &[&'a OsStr], file_args: &[&'a OsStr]) -> Vec<&'a OsStr> {
let mut args = cfg_args.to_owned();

@@ -321,8 +324,16 @@ fn get_or_create_venv(venv_path: &Path, src_reqs_path: &Path) -> Result<PathBuf,
fn create_venv_at_path(path: &Path) -> Result<(), Error> {
/// Preferred python versions in order. Newest to oldest then current
/// development versions
const TRY_PY: &[&str] =
&["python3.11", "python3.10", "python3.9", "python3", "python", "python3.12", "python3.13"];
const TRY_PY: &[&str] = &[
"python3.13",
"python3.12",
"python3.11",
"python3.10",
"python3.9",
"python3",
"python",
"python3.14",
];

let mut sys_py = None;
let mut found = Vec::new();

@@ -357,22 +368,40 @@ fn create_venv_at_path(path: &Path) -> Result<(), Error> {
return Err(ret);
};

eprintln!("creating virtual environment at '{}' using '{sys_py}'", path.display());
let out = Command::new(sys_py).args(["-m", "virtualenv"]).arg(path).output().unwrap();
// First try venv, which should be packaged in the Python3 standard library.
// If it is not available, try to create the virtual environment using the
// virtualenv package.
if try_create_venv(sys_py, path, "venv").is_ok() {
return Ok(());
}
try_create_venv(sys_py, path, "virtualenv")
}

fn try_create_venv(python: &str, path: &Path, module: &str) -> Result<(), Error> {
eprintln!(
"creating virtual environment at '{}' using '{python}' and '{module}'",
path.display()
);
let out = Command::new(python).args(["-m", module]).arg(path).output().unwrap();

if out.status.success() {
return Ok(());
}

let stderr = String::from_utf8_lossy(&out.stderr);
let err = if stderr.contains("No module named virtualenv") {
let err = if stderr.contains(&format!("No module named {module}")) {
Error::Generic(format!(
"virtualenv not found: you may need to install it \
(`{sys_py} -m pip install virtualenv`)"
r#"{module} not found: you may need to install it:
`{python} -m pip install {module}`
If you see an error about "externally managed environment" when running the above command,
either install `{module}` using your system package manager
(e.g. `sudo apt-get install {python}-{module}`) or create a virtual environment manually, install
`{module}` in it and then activate it before running tidy.
"#
))
} else {
Error::Generic(format!(
"failed to create venv at '{}' using {sys_py}: {stderr}",
"failed to create venv at '{}' using {python} -m {module}: {stderr}",
path.display()
))
};

@@ -2204,7 +2204,6 @@ ui/issues/issue-3895.rs
ui/issues/issue-38954.rs
ui/issues/issue-38987.rs
ui/issues/issue-39089.rs
ui/issues/issue-39175.rs
ui/issues/issue-39211.rs
ui/issues/issue-39367.rs
ui/issues/issue-39548.rs

@@ -17,7 +17,7 @@ use ignore::Walk;
const ENTRY_LIMIT: u32 = 901;
// FIXME: The following limits should be reduced eventually.

const ISSUES_ENTRY_LIMIT: u32 = 1664;
const ISSUES_ENTRY_LIMIT: u32 = 1662;

const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[
"rs", // test source files

@@ -50,9 +50,9 @@ struct ThreeU8s(u8, u8, u8);

// CHECK-LABEL: read_large
// aix: lwz [[REG1:.*]], 16(4)
// aix-NEXT: lxvd2x 0, 0, 4
// aix-NEXT: lxv{{d2x|w4x}} 0, 0, 4
// aix-NEXT: stw [[REG1]], 16(3)
// aix-NEXT: stxvd2x 0, 0, 3
// aix-NEXT: stxv{{d2x|w4x}} 0, 0, 3
// be: lwz [[REG1:.*]], 16(4)
// be-NEXT: stw [[REG1]], 16(3)
// be-NEXT: ld [[REG2:.*]], 8(4)

@@ -118,8 +118,8 @@ extern "C" fn read_small(x: &ThreeU8s) -> ThreeU8s {
// aix-NEXT: std 4, 56(1)
// aix-NEXT: stw [[REG1]], 16(6)
// aix-NEXT: addi [[REG2:.*]], 1, 48
// aix-NEXT: lxvd2x 0, 0, [[REG2]]
// aix-NEXT: stxvd2x 0, 0, 6
// aix-NEXT: lxv{{d2x|w4x}} 0, 0, [[REG2]]
// aix-NEXT: stxv{{d2x|w4x}} 0, 0, 6
// elf: std 3, 0(6)
// be-NEXT: rldicl [[REG1:.*]], 5, 32, 32
// elf-NEXT: std 4, 8(6)

@@ -4,8 +4,8 @@
fn bar() -> bool {
let mut _0: bool;

+ coverage body span: $DIR/instrument_coverage.rs:19:18: 21:2 (#0)
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:19:1: 21:2 (#0);
+ coverage body span: $DIR/instrument_coverage.rs:29:18: 31:2 (#0)
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:29:1: 31:2 (#0);
+
bb0: {
+ Coverage::CounterIncrement(0);

@@ -7,13 +7,13 @@
let mut _2: bool;
let mut _3: !;

+ coverage body span: $DIR/instrument_coverage.rs:10:11: 16:2 (#0)
+ coverage body span: $DIR/instrument_coverage.rs:14:11: 20:2 (#0)
+ coverage ExpressionId(0) => Expression { lhs: Counter(1), op: Subtract, rhs: Counter(0) };
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:10:1: 10:11 (#0);
+ coverage Code(Counter(1)) => $DIR/instrument_coverage.rs:12:12: 12:17 (#0);
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:13:13: 13:18 (#0);
+ coverage Code(Expression(0)) => $DIR/instrument_coverage.rs:14:10: 14:10 (#0);
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:16:2: 16:2 (#0);
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:14:1: 14:11 (#0);
+ coverage Code(Counter(1)) => $DIR/instrument_coverage.rs:16:12: 16:17 (#0);
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:17:13: 17:18 (#0);
+ coverage Code(Expression(0)) => $DIR/instrument_coverage.rs:18:10: 18:10 (#0);
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:20:2: 20:2 (#0);
+
bb0: {
+ Coverage::CounterIncrement(0);

@@ -6,7 +6,11 @@
//@ compile-flags: -Cinstrument-coverage -Zno-profiler-runtime

// EMIT_MIR instrument_coverage.main.InstrumentCoverage.diff
// EMIT_MIR instrument_coverage.bar.InstrumentCoverage.diff
// CHECK-LABEL: fn main()
// CHECK: coverage body span:
// CHECK: coverage Code(Counter({{[0-9]+}})) =>
// CHECK: bb0:
// CHECK: Coverage::CounterIncrement
fn main() {
loop {
if bar() {

@@ -15,14 +19,13 @@ fn main() {
}
}

// EMIT_MIR instrument_coverage.bar.InstrumentCoverage.diff
// CHECK-LABEL: fn bar()
// CHECK: coverage body span:
// CHECK: coverage Code(Counter({{[0-9]+}})) =>
// CHECK: bb0:
// CHECK: Coverage::CounterIncrement
#[inline(never)]
fn bar() -> bool {
true
}

// CHECK: coverage ExpressionId({{[0-9]+}}) =>
// CHECK-DAG: coverage Code(Counter({{[0-9]+}})) =>
// CHECK-DAG: coverage Code(Expression({{[0-9]+}})) =>
// CHECK: bb0:
// CHECK-DAG: Coverage::ExpressionUsed({{[0-9]+}})
// CHECK-DAG: Coverage::CounterIncrement({{[0-9]+}})

@ -5,8 +5,7 @@
|
|||
// without #[repr(simd)]
|
||||
|
||||
//@ run-pass
|
||||
//@ ignore-wasm32 no processes
|
||||
//@ ignore-sgx no processes
|
||||
//@ needs-subprocess
|
||||
|
||||
#![feature(avx512_target_feature)]
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
//@ run-pass
|
||||
//@ ignore-wasm32 can't run commands
|
||||
//@ ignore-sgx no processes
|
||||
//@ needs-subprocess
|
||||
//@ ignore-fuchsia must translate zircon signal to SIGSEGV/SIGBUS, FIXME (#58590)
|
||||
|
||||
#![feature(rustc_private)]
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
//@[aarch64] only-aarch64
|
||||
//@[x32] only-x86
|
||||
//@[x64] only-x86_64
|
||||
//@ ignore-sgx no processes
|
||||
//@ needs-subprocess
|
||||
//@ ignore-musl FIXME #31506
|
||||
//@ ignore-fuchsia no exception handler registered for segfault
|
||||
//@ compile-flags: -C lto
|
||||
|
|
|
@ -3,8 +3,7 @@
|
|||
//@[aarch64] only-aarch64
|
||||
//@[x32] only-x86
|
||||
//@[x64] only-x86_64
|
||||
//@ ignore-emscripten no processes
|
||||
//@ ignore-sgx no processes
|
||||
//@ needs-subprocess
|
||||
//@ ignore-fuchsia no exception handler registered for segfault
|
||||
//@ ignore-nto Crash analysis impossible at SIGSEGV in QNX Neutrino
|
||||
//@ ignore-ios Stack probes are enabled, but the SIGSEGV handler isn't
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
//@ run-pass
|
||||
//@ ignore-wasm32 no processes
|
||||
//@ ignore-sgx no processes
|
||||
//@ needs-subprocess
|
||||
|
||||
use std::alloc::{Layout, handle_alloc_error};
|
||||
use std::env;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//@ run-fail
|
||||
//@ error-pattern:index out of bounds
|
||||
//@ ignore-emscripten no processes
|
||||
//@ needs-subprocess
|
||||
|
||||
use std::mem::size_of;
|
||||
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
//@ run-pass
|
||||
//@ needs-unwind
|
||||
//@ needs-threads
|
||||
|
||||
#![allow(overflowing_literals)]
|
||||
|
||||
// Test that we cleanup a fixed size Box<[D; k]> properly when D has a
|
||||
// destructor.
|
||||
|
||||
//@ ignore-emscripten no threads support
|
||||
|
||||
use std::thread;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
|
|
|
@@ -1,12 +1,12 @@
//@ run-pass
//@ needs-unwind
//@ needs-threads

#![allow(overflowing_literals)]

// Test that we cleanup dynamic sized Box<[D]> properly when D has a
// destructor.

//@ ignore-emscripten no threads support

use std::thread;
use std::sync::atomic::{AtomicUsize, Ordering};

@@ -2,7 +2,7 @@

//@ run-fail
//@ error-pattern:index out of bounds
//@ ignore-emscripten no processes
//@ needs-subprocess

#[allow(unconditional_panic)]
fn main() {

@@ -1,8 +1,8 @@
//@ run-pass
//@ needs-unwind
#![allow(overflowing_literals)]
//@ needs-threads

//@ ignore-emscripten no threads support
#![allow(overflowing_literals)]

// Test that using the `vec!` macro nested within itself works when
// the contents implement Drop and we hit a panic in the middle of

@@ -1,7 +1,6 @@
//@ run-pass
//@ needs-unwind

//@ ignore-emscripten no threads support
//@ needs-threads

// Test that if a slicing expr[..] fails, the correct cleanups happen.

@@ -1,7 +1,6 @@
//@ run-pass
//@ needs-unwind

//@ ignore-emscripten no threads support
//@ needs-threads

// Test that if a slicing expr[..] fails, the correct cleanups happen.

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:index out of bounds: the len is 1 but the index is 2
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    let v: Vec<isize> = vec![10];
@@ -1,8 +1,7 @@
//@ run-pass
//@ ignore-android FIXME #17520
//@ ignore-wasm32 spawning processes is not supported
//@ needs-subprocess
//@ ignore-openbsd no support for libbacktrace without filename
//@ ignore-sgx no processes
//@ ignore-msvc see #62897 and `backtrace-debuginfo.rs` test
//@ ignore-fuchsia Backtraces not symbolized
//@ compile-flags:-g

@@ -1,8 +1,7 @@
//@ run-pass
//@ ignore-android FIXME #17520
//@ ignore-wasm32 spawning processes is not supported
//@ needs-subprocess
//@ ignore-openbsd no support for libbacktrace without filename
//@ ignore-sgx no processes
//@ ignore-fuchsia Backtraces not symbolized
//@ compile-flags:-g
//@ compile-flags:-Cstrip=none

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:quux
//@ ignore-emscripten no processes
//@ needs-subprocess

fn foo() -> ! {
    panic!("quux");

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:quux
//@ ignore-emscripten no processes
//@ needs-subprocess

fn my_err(s: String) -> ! {
    println!("{}", s);

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:panic 1
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    let x = 2;

@@ -3,7 +3,7 @@

//@ run-fail
//@ error-pattern:explicit panic
//@ ignore-emscripten no processes
//@ needs-subprocess

struct Parser<'i: 't, 't>(&'i u8, &'t u8);

@@ -1,6 +1,6 @@
//@ run-pass
//@ needs-unwind
//@ ignore-emscripten no threads support
//@ needs-threads

use std::thread;
@@ -20,7 +20,7 @@
// It's unclear how likely such a bug is to recur, but it seems like a
// scenario worth testing.

//@ ignore-emscripten no threads support
//@ needs-threads

use std::thread;

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:oops
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    let func = || -> ! {

@@ -1,8 +1,7 @@
//@ run-pass

//@ ignore-windows - this is a unix-specific test
//@ ignore-wasm32 no processes
//@ ignore-sgx no processes
//@ only-unix (this is a unix-specific test)
//@ needs-subprocess
use std::env;
use std::os::unix::process::CommandExt;
use std::process::Command;
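The test above imports `std::os::unix::process::CommandExt`, whose `exec` method replaces the current process image and therefore only returns on failure. A hedged sketch of that pattern (illustrative only, not the body of the test touched here):

```rust
//@ run-pass
//@ only-unix
//@ needs-subprocess

use std::os::unix::process::CommandExt;
use std::process::Command;

fn main() {
    // `exec` never returns on success, so the only possible result is an
    // `io::Error`. Exec'ing a nonexistent program must fail, and on Unix the
    // ENOENT error maps to `NotFound`.
    let err = Command::new("definitely-not-a-real-program-xyz").exec();
    assert_eq!(err.kind(), std::io::ErrorKind::NotFound);
}
```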
@@ -1,7 +1,6 @@
//@ run-pass
//@ no-prefer-dynamic We move the binary around, so do not depend dynamically on libstd
//@ ignore-wasm32 no processes
//@ ignore-sgx no processes
//@ needs-subprocess
//@ ignore-fuchsia Needs directory creation privilege

use std::env;

@@ -1,13 +1,9 @@
//@ run-pass

#![allow(stable_features)]
//@ ignore-windows - this is a unix-specific test
//@ ignore-wasm32 no processes
//@ ignore-sgx no processes
//@ only-unix (this is a unix-specific test)
//@ needs-subprocess
//@ ignore-fuchsia no execvp syscall provided

#![feature(process_exec)]

use std::env;
use std::os::unix::process::CommandExt;
use std::process::Command;

@@ -1,11 +1,9 @@
//@ run-pass

#![allow(stable_features)]
//@ ignore-windows - this is a unix-specific test
//@ ignore-wasm32 no processes
//@ ignore-sgx no processes
//@ only-unix (this is a unix-specific test)
//@ needs-subprocess
//@ ignore-fuchsia no execvp syscall
#![feature(process_exec, rustc_private)]

#![feature(rustc_private)]

extern crate libc;

@@ -1,9 +1,8 @@
//@ run-pass
//@ ignore-windows - this is a unix-specific test
//@ ignore-wasm32
//@ ignore-sgx
//@ only-unix (this is a unix-specific test)
//@ ignore-musl - returns dummy result for _SC_NGROUPS_MAX
//@ ignore-nto - does not have `/bin/id`, expects groups to be i32 (not u32)
//@ needs-subprocess

#![feature(rustc_private)]
#![feature(setgroups)]

@@ -1,8 +1,7 @@
//@ run-pass
//@ ignore-android
//@ ignore-emscripten
//@ ignore-sgx
//@ ignore-fuchsia no '/bin/sh', '/bin/ls'
//@ needs-subprocess

#![feature(rustc_private)]

@@ -1,6 +1,5 @@
//@ run-pass
//@ ignore-wasm32 no processes
//@ ignore-sgx no processes
//@ needs-subprocess

// Make sure that if a process doesn't have its stdio/stderr descriptors set up
// that we don't die in a large ball of fire
@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:index out of bounds: the len is 5 but the index is 5
//@ ignore-emscripten no processes
//@ needs-subprocess

const fn test(x: usize) -> i32 {
    [42;5][x]

@@ -1,7 +1,7 @@
//@ run-fail
//@ needs-unwind
//@ error-pattern:coroutine resumed after panicking
//@ ignore-emscripten no processes
//@ needs-subprocess

// Test that we get the correct message for resuming a panicked coroutine.

@@ -2,7 +2,7 @@
#![allow(dead_code)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
//@ ignore-emscripten no threads support
//@ needs-threads
//@ needs-unwind

use std::thread;

@@ -1,6 +1,6 @@
//@ run-pass
//@ needs-unwind
//@ ignore-emscripten no threads support
//@ needs-threads

// Issue #787
// Don't try to clean up uninitialized locals

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:explicit panic
//@ ignore-emscripten no processes
//@ needs-subprocess

fn f() -> ! {
    panic!()

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:explicit panic
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    let _x = if false {

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:Number is odd
//@ ignore-emscripten no processes
//@ needs-subprocess

fn even(x: usize) -> bool {
    if x < 2 {

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:quux
//@ ignore-emscripten no processes
//@ needs-subprocess

fn my_err(s: String) -> ! {
    println!("{}", s);
tests/ui/extern/issue-18576.rs
@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:stop
//@ ignore-emscripten no processes
//@ needs-subprocess

// #18576
// Make sure that calling an extern function pointer in an unreachable

@@ -1,6 +1,6 @@
//@ run-pass
//@ needs-unwind
//@ ignore-emscripten no threads support
//@ needs-threads

// rust-lang/rust#64655: with panic=unwind, a panic from a subroutine
// should still run destructors as it unwinds the stack. However,

@@ -1,6 +1,6 @@
//@ run-pass
//@ needs-unwind
//@ ignore-emscripten no threads support
//@ needs-threads

// rust-lang/rust#64655: with panic=unwind, a panic from a subroutine
// should still run destructors as it unwinds the stack. However,

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:explicit panic
//@ ignore-emscripten no processes
//@ needs-subprocess

fn f() -> ! {
    panic!()

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:capacity overflow
//@ ignore-emscripten no processes
//@ needs-subprocess

use std::collections::hash_map::HashMap;
use std::mem::size_of;

@@ -2,7 +2,7 @@

//@ run-fail
//@ error-pattern:panic works
//@ ignore-emscripten no processes
//@ needs-subprocess

use std::*;

@@ -1,11 +1,10 @@
// ignore-tidy-linelength
//! This test checks panic emitted from `mem::{uninitialized,zeroed}`.
//@ run-pass
//@ revisions: default strict
//@ [strict]compile-flags: -Zstrict-init-checks
// ignore-tidy-linelength
//@ ignore-wasm32 spawning processes is not supported
//@ ignore-sgx no processes
//
// This test checks panic emitted from `mem::{uninitialized,zeroed}`.
//@ needs-subprocess

#![allow(deprecated, invalid_value)]
#![feature(never_type)]
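For context on the test above: zero-initializing a type with no valid zero pattern trips the runtime initialization checks, and the offending code has to run in a child process so the parent can observe the failure. A hedged sketch of that pattern, assuming zeroing a reference always trips the always-on checks (the real test covers many more types and the `-Zstrict-init-checks` revision):

```rust
//@ run-pass
//@ needs-subprocess

use std::env;
use std::mem;
use std::process::Command;

fn main() {
    let me = env::args().next().unwrap();
    if env::args().nth(1).as_deref() == Some("zero-a-ref") {
        // A reference has no valid all-zero bit pattern, so the runtime
        // init checks panic/abort here instead of producing a null &u8.
        let _r: &u8 = unsafe { mem::zeroed() };
        return;
    }
    // Run the bad initialization in a child process and require that it dies.
    let status = Command::new(me).arg("zero-a-ref").status().unwrap();
    assert!(!status.success());
}
```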
@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:explicit panic
//@ ignore-emscripten no processes
//@ needs-subprocess

pub fn main() {
    panic!();

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:bad input
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    Some("foo").unwrap_or(panic!("bad input")).to_string();

@@ -2,7 +2,7 @@

//@ run-fail
//@ error-pattern:Hello, world!
//@ ignore-emscripten no processes
//@ needs-subprocess

pub trait Parser {
    type Input;

@@ -1,20 +1,16 @@
//@ run-pass
#![allow(unused_must_use)]
#![allow(non_upper_case_globals)]

//@ ignore-emscripten no threads
//@ check-pass

use std::thread::Builder;

static generations: usize = 1024+256+128+49;
static GENERATIONS: usize = 1024+256+128+49;

fn spawn(mut f: Box<dyn FnMut() + 'static + Send>) {
    Builder::new().stack_size(32 * 1024).spawn(move|| f());
    Builder::new().stack_size(32 * 1024).spawn(move || f());
}

fn child_no(x: usize) -> Box<dyn FnMut() + 'static + Send> {
    Box::new(move|| {
        if x < generations {
    Box::new(move || {
        if x < GENERATIONS {
            spawn(child_no(x+1));
        }
    })
@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:panic evaluated
//@ ignore-emscripten no processes
//@ needs-subprocess

#[allow(unused_variables)]
fn main() {

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:panic evaluated
//@ ignore-emscripten no processes
//@ needs-subprocess

#[allow(unused_variables)]
fn main() {

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:index out of bounds
//@ ignore-emscripten no processes
//@ needs-subprocess

use std::mem;

@@ -1,6 +1,6 @@
//@ run-pass
//@ needs-unwind
//@ ignore-emscripten no threads support
//@ needs-threads

use std::thread;

@@ -1,6 +1,6 @@
//@ run-pass
//@ needs-unwind
//@ ignore-emscripten no threads support
//@ needs-threads

// Check that the destructors of simple enums are run on unwinding

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:custom message
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert!(false, "custom message");

@@ -2,7 +2,7 @@
#![allow(unused_attributes)]
//@ aux-build:issue-29485.rs
//@ needs-unwind
//@ ignore-emscripten no threads
//@ needs-threads

#[feature(recover)]

@@ -5,7 +5,7 @@
// SIGTRAP injected by the drop-flag consistency checking.

//@ needs-unwind
//@ ignore-emscripten no threads support
//@ needs-threads

struct Foo;

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:so long
//@ ignore-emscripten no processes
//@ needs-subprocess

#![allow(unreachable_code)]
@@ -3,7 +3,7 @@

//@ run-fail
//@ error-pattern:panicking destructors ftw!
//@ ignore-emscripten no processes
//@ needs-subprocess

struct Observer<'a>(&'a mut FilledOnDrop);

@@ -1,6 +1,5 @@
//@ run-pass
//@ ignore-wasm32 no processes
//@ ignore-sgx no processes
//@ needs-subprocess

use std::process::{Command, Stdio};
use std::env;

@@ -1,5 +1,5 @@
//@ run-pass
//@ ignore-emscripten
//@ needs-threads

#[repr(C)]
pub struct Foo(i128);

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:overflow
//@ ignore-emscripten no processes
//@ needs-subprocess

use std::time::{Duration, SystemTime};

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:overflow
//@ ignore-emscripten no processes
//@ needs-subprocess

use std::time::{Instant, Duration};

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:overflow
//@ ignore-emscripten no processes
//@ needs-subprocess

use std::time::{Duration, SystemTime};

@@ -1,5 +1,6 @@
//@ revisions: edition2021 edition2024
//@ ignore-wasm no panic or subprocess support
//@ ignore-wasm no panic support
//@ needs-subprocess
//@ [edition2024] edition: 2024
//@ run-pass
//@ needs-unwind

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:moop
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    for _ in 0_usize..10_usize {

@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:assertion failed: 1 == 2
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert!(1 == 2);

@@ -2,7 +2,7 @@
//@ error-pattern:assertion `left == right` failed: 1 + 1 definitely should be 3
//@ error-pattern: left: 2
//@ error-pattern: right: 3
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert_eq!(1 + 1, 3, "1 + 1 definitely should be 3");

@@ -2,7 +2,7 @@
//@ error-pattern:assertion `left == right` failed
//@ error-pattern: left: 14
//@ error-pattern: right: 15
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert_eq!(14, 15);
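The `error-pattern` directives in the two hunks above are substrings of the panic message that `assert_eq!` prints on failure, which is why the patterns list both operands separately. A small sketch of the message shape they match (wording approximate):

```rust
fn main() {
    // Panics with a message of roughly this shape, matched by the
    // error-pattern directives above:
    //
    //   assertion `left == right` failed
    //     left: 14
    //    right: 15
    assert_eq!(14, 15);
}
```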
@@ -1,6 +1,6 @@
//@ run-fail
//@ error-pattern:assertion failed: false
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert!(false);

@@ -1,7 +1,7 @@
//@ run-fail
//@ error-pattern: panicked
//@ error-pattern: test-assert-fmt 42 rust
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert!(false, "test-assert-fmt {} {}", 42, "rust");

@@ -1,7 +1,7 @@
//@ run-fail
//@ error-pattern:panicked
//@ error-pattern:test-assert-owned
//@ ignore-emscripten no processes
//@ needs-subprocess

#![allow(non_fmt_panics)]

@@ -1,7 +1,7 @@
//@ run-fail
//@ error-pattern:panicked
//@ error-pattern:test-assert-static
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert!(false, "test-assert-static");

@@ -2,7 +2,7 @@
//@ error-pattern:assertion `left matches right` failed: 1 + 1 definitely should be 3
//@ error-pattern: left: 2
//@ error-pattern: right: 3
//@ ignore-emscripten no processes
//@ needs-subprocess

#![feature(assert_matches)]

@@ -2,7 +2,7 @@
//@ error-pattern:assertion `left != right` failed: 1 + 1 definitely should not be 2
//@ error-pattern: left: 2
//@ error-pattern: right: 2
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert_ne!(1 + 1, 2, "1 + 1 definitely should not be 2");

@@ -2,7 +2,7 @@
//@ error-pattern:assertion `left != right` failed
//@ error-pattern: left: 14
//@ error-pattern: right: 14
//@ ignore-emscripten no processes
//@ needs-subprocess

fn main() {
    assert_ne!(14, 14);
Some files were not shown because too many files have changed in this diff.