Auto merge of #139781 - jhpratt:rollup-qadsjvb, r=jhpratt

Rollup of 9 pull requests

Successful merges:

 - #138336 (Improve `-Z crate-attr` diagnostics)
 - #139636 (Encode dep node edge count as u32 instead of usize)
 - #139666 (cleanup `mir_borrowck`)
 - #139695 (compiletest: consistently use `camino::{Utf8Path,Utf8PathBuf}` throughout)
 - #139699 (Proactively update coroutine drop shim's phase to account for later passes applied during shim query)
 - #139718 (enforce unsafe attributes in pre-2024 editions by default)
 - #139722 (Move some things to rustc_type_ir)
 - #139760 (UI tests: migrate remaining compile time `error-pattern`s to line annotations when possible)
 - #139776 (Switch attrs to `diagnostic::on_unimplemented`)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-04-14 07:07:54 +00:00
commit 5961e5ba3d
152 changed files with 1818 additions and 1713 deletions

View file

@ -719,6 +719,7 @@ version = "0.0.0"
dependencies = [
"anstyle-svg",
"build_helper",
"camino",
"colored",
"diff",
"getopts",
@ -4671,6 +4672,7 @@ name = "rustdoc-gui-test"
version = "0.1.0"
dependencies = [
"build_helper",
"camino",
"compiletest",
"getopts",
"walkdir",

View file

@ -627,7 +627,7 @@ pub fn mk_doc_comment(
Attribute { kind: AttrKind::DocComment(comment_kind, data), id: g.mk_attr_id(), style, span }
}
pub fn mk_attr(
fn mk_attr(
g: &AttrIdGenerator,
style: AttrStyle,
unsafety: Safety,

View file

@ -21,6 +21,7 @@ use std::cell::RefCell;
use std::marker::PhantomData;
use std::ops::{ControlFlow, Deref};
use borrow_set::LocalsStateAtExit;
use root_cx::BorrowCheckRootCtxt;
use rustc_abi::FieldIdx;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
@ -303,33 +304,13 @@ fn do_mir_borrowck<'tcx>(
root_cx.set_tainted_by_errors(e);
}
let mut local_names = IndexVec::from_elem(None, &input_body.local_decls);
for var_debug_info in &input_body.var_debug_info {
if let VarDebugInfoContents::Place(place) = var_debug_info.value {
if let Some(local) = place.as_local() {
if let Some(prev_name) = local_names[local]
&& var_debug_info.name != prev_name
{
span_bug!(
var_debug_info.source_info.span,
"local {:?} has many names (`{}` vs `{}`)",
local,
prev_name,
var_debug_info.name
);
}
local_names[local] = Some(var_debug_info.name);
}
}
}
// Replace all regions with fresh inference variables. This
// requires first making our own copy of the MIR. This copy will
// be modified (in place) to contain non-lexical lifetimes. It
// will have a lifetime tied to the inference context.
let mut body_owned = input_body.clone();
let mut promoted = input_promoted.to_owned();
let free_regions = nll::replace_regions_in_mir(&infcx, &mut body_owned, &mut promoted);
let universal_regions = nll::replace_regions_in_mir(&infcx, &mut body_owned, &mut promoted);
let body = &body_owned; // no further changes
let location_table = PoloniusLocationTable::new(body);
@ -354,7 +335,7 @@ fn do_mir_borrowck<'tcx>(
} = nll::compute_regions(
root_cx,
&infcx,
free_regions,
universal_regions,
body,
&promoted,
&location_table,
@ -367,24 +348,23 @@ fn do_mir_borrowck<'tcx>(
// Dump MIR results into a file, if that is enabled. This lets us
// write unit-tests, as well as helping with debugging.
nll::dump_nll_mir(&infcx, body, &regioncx, &opt_closure_req, &borrow_set);
polonius::dump_polonius_mir(
&infcx,
body,
&regioncx,
&opt_closure_req,
&borrow_set,
polonius_diagnostics.as_ref(),
);
// We also have a `#[rustc_regions]` annotation that causes us to dump
// information.
nll::dump_annotation(&infcx, body, &regioncx, &opt_closure_req);
let movable_coroutine = body.coroutine.is_some()
&& tcx.coroutine_movability(def.to_def_id()) == hir::Movability::Movable;
let diags_buffer = &mut BorrowckDiagnosticsBuffer::default();
nll::dump_annotation(&infcx, body, &regioncx, &opt_closure_req, diags_buffer);
let movable_coroutine =
// The first argument is the coroutine type passed by value
if let Some(local) = body.local_decls.raw.get(1)
// Get the interior types and args which typeck computed
&& let ty::Coroutine(def_id, _) = *local.ty.kind()
&& tcx.coroutine_movability(def_id) == hir::Movability::Movable
{
true
} else {
false
};
// While promoteds should mostly be correct by construction, we need to check them for
// invalid moves to detect moving out of arrays:`struct S; fn main() { &([S][0]); }`.
for promoted_body in &promoted {
@ -402,7 +382,6 @@ fn do_mir_borrowck<'tcx>(
location_table: &location_table,
movable_coroutine,
fn_self_span_reported: Default::default(),
locals_are_invalidated_at_exit,
access_place_error_reported: Default::default(),
reservation_error_reported: Default::default(),
uninitialized_error_reported: Default::default(),
@ -434,6 +413,26 @@ fn do_mir_borrowck<'tcx>(
promoted_mbcx.report_move_errors();
}
let mut local_names = IndexVec::from_elem(None, &body.local_decls);
for var_debug_info in &body.var_debug_info {
if let VarDebugInfoContents::Place(place) = var_debug_info.value {
if let Some(local) = place.as_local() {
if let Some(prev_name) = local_names[local]
&& var_debug_info.name != prev_name
{
span_bug!(
var_debug_info.source_info.span,
"local {:?} has many names (`{}` vs `{}`)",
local,
prev_name,
var_debug_info.name
);
}
local_names[local] = Some(var_debug_info.name);
}
}
}
let mut mbcx = MirBorrowckCtxt {
root_cx,
infcx: &infcx,
@ -441,7 +440,6 @@ fn do_mir_borrowck<'tcx>(
move_data: &move_data,
location_table: &location_table,
movable_coroutine,
locals_are_invalidated_at_exit,
fn_self_span_reported: Default::default(),
access_place_error_reported: Default::default(),
reservation_error_reported: Default::default(),
@ -454,9 +452,9 @@ fn do_mir_borrowck<'tcx>(
local_names,
region_names: RefCell::default(),
next_region_name: RefCell::new(1),
polonius_output,
move_errors: Vec::new(),
diags_buffer,
polonius_output: polonius_output.as_deref(),
polonius_diagnostics: polonius_diagnostics.as_ref(),
};
@ -473,16 +471,6 @@ fn do_mir_borrowck<'tcx>(
mbcx.report_move_errors();
// If requested, dump polonius MIR.
polonius::dump_polonius_mir(
&infcx,
body,
&regioncx,
&borrow_set,
polonius_diagnostics.as_ref(),
&opt_closure_req,
);
// For each non-user used mutable variable, check if it's been assigned from
// a user-declared local. If so, then put that local into the used_mut set.
// Note that this set is expected to be small - only upvars from closures
@ -513,7 +501,6 @@ fn do_mir_borrowck<'tcx>(
};
let body_with_facts = if consumer_options.is_some() {
let output_facts = mbcx.polonius_output;
Some(Box::new(BodyWithBorrowckFacts {
body: body_owned,
promoted,
@ -521,7 +508,7 @@ fn do_mir_borrowck<'tcx>(
region_inference_context: regioncx,
location_table: polonius_input.as_ref().map(|_| location_table),
input_facts: polonius_input,
output_facts,
output_facts: polonius_output,
}))
} else {
None
@ -654,13 +641,6 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> {
location_table: &'a PoloniusLocationTable,
movable_coroutine: bool,
/// This keeps track of whether local variables are free-ed when the function
/// exits even without a `StorageDead`, which appears to be the case for
/// constants.
///
/// I'm not sure this is the right approach - @eddyb could you try and
/// figure this out?
locals_are_invalidated_at_exit: bool,
/// This field keeps track of when borrow errors are reported in the access_place function
/// so that there is no duplicate reporting. This field cannot also be used for the conflicting
/// borrow errors that is handled by the `reservation_error_reported` field as the inclusion
@ -708,12 +688,11 @@ struct MirBorrowckCtxt<'a, 'infcx, 'tcx> {
/// The counter for generating new region names.
next_region_name: RefCell<usize>,
/// Results of Polonius analysis.
polonius_output: Option<Box<PoloniusOutput>>,
diags_buffer: &'a mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>,
move_errors: Vec<MoveError<'tcx>>,
/// Results of Polonius analysis.
polonius_output: Option<&'a PoloniusOutput>,
/// When using `-Zpolonius=next`: the data used to compute errors and diagnostics.
polonius_diagnostics: Option<&'a PoloniusDiagnosticsContext>,
}
@ -937,13 +916,20 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt<
| TerminatorKind::Return
| TerminatorKind::TailCall { .. }
| TerminatorKind::CoroutineDrop => {
// Returning from the function implicitly kills storage for all locals and statics.
// Often, the storage will already have been killed by an explicit
// StorageDead, but we don't always emit those (notably on unwind paths),
// so this "extra check" serves as a kind of backup.
for i in state.borrows.iter() {
let borrow = &self.borrow_set[i];
self.check_for_invalidation_at_exit(loc, borrow, span);
match self.borrow_set.locals_state_at_exit() {
LocalsStateAtExit::AllAreInvalidated => {
// Returning from the function implicitly kills storage for all locals and statics.
// Often, the storage will already have been killed by an explicit
// StorageDead, but we don't always emit those (notably on unwind paths),
// so this "extra check" serves as a kind of backup.
for i in state.borrows.iter() {
let borrow = &self.borrow_set[i];
self.check_for_invalidation_at_exit(loc, borrow, span);
}
}
// If we do not implicitly invalidate all locals on exit,
// we check for conflicts when dropping or moving this local.
LocalsStateAtExit::SomeAreInvalidated { has_storage_dead_or_moved: _ } => {}
}
}
@ -1715,22 +1701,15 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
// we'll have a memory leak) and assume that all statics have a destructor.
//
// FIXME: allow thread-locals to borrow other thread locals?
let (might_be_alive, will_be_dropped) =
if self.body.local_decls[root_place.local].is_ref_to_thread_local() {
// Thread-locals might be dropped after the function exits
// We have to dereference the outer reference because
// borrows don't conflict behind shared references.
root_place.projection = TyCtxtConsts::DEREF_PROJECTION;
(true, true)
} else {
(false, self.locals_are_invalidated_at_exit)
};
if !will_be_dropped {
debug!("place_is_invalidated_at_exit({:?}) - won't be dropped", place);
return;
}
let might_be_alive = if self.body.local_decls[root_place.local].is_ref_to_thread_local() {
// Thread-locals might be dropped after the function exits
// We have to dereference the outer reference because
// borrows don't conflict behind shared references.
root_place.projection = TyCtxtConsts::DEREF_PROJECTION;
true
} else {
false
};
let sd = if might_be_alive { Deep } else { Shallow(None) };

View file

@ -21,7 +21,7 @@ use tracing::{debug, instrument};
use crate::borrow_set::BorrowSet;
use crate::consumers::ConsumerOptions;
use crate::diagnostics::{BorrowckDiagnosticsBuffer, RegionErrors};
use crate::diagnostics::RegionErrors;
use crate::polonius::PoloniusDiagnosticsContext;
use crate::polonius::legacy::{
PoloniusFacts, PoloniusFactsExt, PoloniusLocationTable, PoloniusOutput,
@ -117,11 +117,6 @@ pub(crate) fn compute_regions<'a, 'tcx>(
Rc::clone(&location_map),
);
// Create the region inference context, taking ownership of the
// region inference data that was contained in `infcx`, and the
// base constraints generated by the type-check.
let var_infos = infcx.get_region_var_infos();
// If requested, emit legacy polonius facts.
polonius::legacy::emit_facts(
&mut polonius_facts,
@ -134,13 +129,8 @@ pub(crate) fn compute_regions<'a, 'tcx>(
&constraints,
);
let mut regioncx = RegionInferenceContext::new(
infcx,
var_infos,
constraints,
universal_region_relations,
location_map,
);
let mut regioncx =
RegionInferenceContext::new(infcx, constraints, universal_region_relations, location_map);
// If requested for `-Zpolonius=next`, convert NLL constraints to localized outlives constraints
// and use them to compute loan liveness.
@ -297,7 +287,6 @@ pub(super) fn dump_annotation<'tcx, 'infcx>(
body: &Body<'tcx>,
regioncx: &RegionInferenceContext<'tcx>,
closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>,
diagnostics_buffer: &mut BorrowckDiagnosticsBuffer<'infcx, 'tcx>,
) {
let tcx = infcx.tcx;
let base_def_id = tcx.typeck_root_def_id(body.source.def_id());
@ -335,13 +324,11 @@ pub(super) fn dump_annotation<'tcx, 'infcx>(
} else {
let mut err = infcx.dcx().struct_span_note(def_span, "no external requirements");
regioncx.annotate(tcx, &mut err);
err
};
// FIXME(@lcnr): We currently don't dump the inferred hidden types here.
diagnostics_buffer.buffer_non_error(err);
err.emit();
}
fn for_each_region_constraint<'tcx>(

View file

@ -24,9 +24,9 @@ pub(crate) fn dump_polonius_mir<'tcx>(
infcx: &BorrowckInferCtxt<'tcx>,
body: &Body<'tcx>,
regioncx: &RegionInferenceContext<'tcx>,
closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>,
borrow_set: &BorrowSet<'tcx>,
polonius_diagnostics: Option<&PoloniusDiagnosticsContext>,
closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>,
) {
let tcx = infcx.tcx;
if !tcx.sess.opts.unstable_opts.polonius.is_next_enabled() {

View file

@ -9,7 +9,7 @@ use rustc_errors::Diag;
use rustc_hir::def_id::CRATE_DEF_ID;
use rustc_index::IndexVec;
use rustc_infer::infer::outlives::test_type_match;
use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound, VerifyIfEq};
use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound, VerifyIfEq};
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin};
use rustc_middle::bug;
use rustc_middle::mir::{
@ -145,7 +145,7 @@ pub struct RegionInferenceContext<'tcx> {
/// variables are identified by their index (`RegionVid`). The
/// definition contains information about where the region came
/// from as well as its final inferred value.
pub(crate) definitions: IndexVec<RegionVid, RegionDefinition<'tcx>>,
pub(crate) definitions: Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>>,
/// The liveness constraints added to each region. For most
/// regions, these start out empty and steadily grow, though for
@ -385,6 +385,26 @@ fn sccs_info<'tcx>(infcx: &BorrowckInferCtxt<'tcx>, sccs: &ConstraintSccs) {
debug!("SCC edges {:#?}", scc_node_to_edges);
}
fn create_definitions<'tcx>(
infcx: &BorrowckInferCtxt<'tcx>,
universal_regions: &UniversalRegions<'tcx>,
) -> Frozen<IndexVec<RegionVid, RegionDefinition<'tcx>>> {
// Create a RegionDefinition for each inference variable.
let mut definitions: IndexVec<_, _> = infcx
.get_region_var_infos()
.iter()
.map(|info| RegionDefinition::new(info.universe, info.origin))
.collect();
// Add the external name for all universal regions.
for (external_name, variable) in universal_regions.named_universal_regions_iter() {
debug!("region {variable:?} has external name {external_name:?}");
definitions[variable].external_name = Some(external_name);
}
Frozen::freeze(definitions)
}
impl<'tcx> RegionInferenceContext<'tcx> {
/// Creates a new region inference context with a total of
/// `num_region_variables` valid inference variables; the first N
@ -395,7 +415,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// of constraints produced by the MIR type check.
pub(crate) fn new(
infcx: &BorrowckInferCtxt<'tcx>,
var_infos: VarInfos,
constraints: MirTypeckRegionConstraints<'tcx>,
universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>,
location_map: Rc<DenseLocationMap>,
@ -426,11 +445,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
infcx.set_tainted_by_errors(guar);
}
// Create a RegionDefinition for each inference variable.
let definitions: IndexVec<_, _> = var_infos
.iter()
.map(|info| RegionDefinition::new(info.universe, info.origin))
.collect();
let definitions = create_definitions(infcx, &universal_regions);
let constraint_sccs =
outlives_constraints.add_outlives_static(&universal_regions, &definitions);
@ -526,18 +541,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// means that the `R1: !1` constraint here will cause
/// `R1` to become `'static`.
fn init_free_and_bound_regions(&mut self) {
// Update the names (if any)
// This iterator has unstable order but we collect it all into an IndexVec
for (external_name, variable) in
self.universal_region_relations.universal_regions.named_universal_regions_iter()
{
debug!(
"init_free_and_bound_regions: region {:?} has external name {:?}",
variable, external_name
);
self.definitions[variable].external_name = Some(external_name);
}
for variable in self.definitions.indices() {
let scc = self.constraint_sccs.scc(variable);

View file

@ -21,7 +21,6 @@ use crate::{ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategor
pub(crate) struct ConstraintConversion<'a, 'tcx> {
infcx: &'a InferCtxt<'tcx>,
tcx: TyCtxt<'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
/// Each RBP `GK: 'a` is assumed to be true. These encode
/// relationships like `T: 'a` that are added via implicit bounds
@ -34,7 +33,6 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> {
/// logic expecting to see (e.g.) `ReStatic`, and if we supplied
/// our special inference variable there, we would mess that up.
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
implicit_region_bound: ty::Region<'tcx>,
param_env: ty::ParamEnv<'tcx>,
known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>],
locations: Locations,
@ -49,7 +47,6 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
infcx: &'a InferCtxt<'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
region_bound_pairs: &'a RegionBoundPairs<'tcx>,
implicit_region_bound: ty::Region<'tcx>,
param_env: ty::ParamEnv<'tcx>,
known_type_outlives_obligations: &'a [ty::PolyTypeOutlivesPredicate<'tcx>],
locations: Locations,
@ -59,10 +56,8 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
) -> Self {
Self {
infcx,
tcx: infcx.tcx,
universal_regions,
region_bound_pairs,
implicit_region_bound,
param_env,
known_type_outlives_obligations,
locations,
@ -96,7 +91,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
// into a vector. These are the regions that we will be
// relating to one another.
let closure_mapping = &UniversalRegions::closure_mapping(
self.tcx,
self.infcx.tcx,
closure_args,
closure_requirements.num_external_vids,
closure_def_id,
@ -111,7 +106,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
let subject = match outlives_requirement.subject {
ClosureOutlivesSubject::Region(re) => closure_mapping[re].into(),
ClosureOutlivesSubject::Ty(subject_ty) => {
subject_ty.instantiate(self.tcx, |vid| closure_mapping[vid]).into()
subject_ty.instantiate(self.infcx.tcx, |vid| closure_mapping[vid]).into()
}
};
@ -127,14 +122,14 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
predicate: ty::OutlivesPredicate<'tcx, ty::GenericArg<'tcx>>,
constraint_category: ConstraintCategory<'tcx>,
) {
let tcx = self.infcx.tcx;
debug!("generate: constraints at: {:#?}", self.locations);
// Extract out various useful fields we'll need below.
let ConstraintConversion {
tcx,
infcx,
universal_regions,
region_bound_pairs,
implicit_region_bound,
known_type_outlives_obligations,
..
} = *self;
@ -145,7 +140,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
break;
}
if !self.tcx.recursion_limit().value_within_limit(iteration) {
if !tcx.recursion_limit().value_within_limit(iteration) {
bug!(
"FIXME(-Znext-solver): Overflowed when processing region obligations: {outlives_predicates:#?}"
);
@ -170,10 +165,11 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
);
}
let implicit_region_bound =
ty::Region::new_var(tcx, universal_regions.implicit_region_bound());
// we don't actually use this for anything, but
// the `TypeOutlives` code needs an origin.
let origin = infer::RelateParamBound(self.span, t1, None);
TypeOutlives::new(
&mut *self,
tcx,
@ -205,7 +201,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
/// are dealt with during trait solving.
fn replace_placeholders_with_nll<T: TypeFoldable<TyCtxt<'tcx>>>(&mut self, value: T) -> T {
if value.has_placeholders() {
fold_regions(self.tcx, value, |r, _| match r.kind() {
fold_regions(self.infcx.tcx, value, |r, _| match r.kind() {
ty::RePlaceholder(placeholder) => {
self.constraints.placeholder_region(self.infcx, placeholder)
}

View file

@ -49,14 +49,12 @@ pub(crate) struct CreateResult<'tcx> {
pub(crate) fn create<'tcx>(
infcx: &InferCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
implicit_region_bound: ty::Region<'tcx>,
universal_regions: UniversalRegions<'tcx>,
constraints: &mut MirTypeckRegionConstraints<'tcx>,
) -> CreateResult<'tcx> {
UniversalRegionRelationsBuilder {
infcx,
param_env,
implicit_region_bound,
constraints,
universal_regions,
region_bound_pairs: Default::default(),
@ -181,7 +179,6 @@ struct UniversalRegionRelationsBuilder<'a, 'tcx> {
infcx: &'a InferCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
universal_regions: UniversalRegions<'tcx>,
implicit_region_bound: ty::Region<'tcx>,
constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
// outputs:
@ -320,7 +317,6 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
self.infcx,
&self.universal_regions,
&self.region_bound_pairs,
self.implicit_region_bound,
param_env,
&known_type_outlives_obligations,
Locations::All(span),

View file

@ -113,7 +113,6 @@ pub(crate) fn type_check<'a, 'tcx>(
move_data: &MoveData<'tcx>,
location_map: Rc<DenseLocationMap>,
) -> MirTypeckResults<'tcx> {
let implicit_region_bound = ty::Region::new_var(infcx.tcx, universal_regions.fr_fn_body);
let mut constraints = MirTypeckRegionConstraints {
placeholder_indices: PlaceholderIndices::default(),
placeholder_index_to_region: IndexVec::default(),
@ -129,13 +128,7 @@ pub(crate) fn type_check<'a, 'tcx>(
region_bound_pairs,
normalized_inputs_and_output,
known_type_outlives_obligations,
} = free_region_relations::create(
infcx,
infcx.param_env,
implicit_region_bound,
universal_regions,
&mut constraints,
);
} = free_region_relations::create(infcx, infcx.param_env, universal_regions, &mut constraints);
let pre_obligations = infcx.take_registered_region_obligations();
assert!(
@ -160,7 +153,6 @@ pub(crate) fn type_check<'a, 'tcx>(
user_type_annotations: &body.user_type_annotations,
region_bound_pairs,
known_type_outlives_obligations,
implicit_region_bound,
reported_errors: Default::default(),
universal_regions: &universal_region_relations.universal_regions,
location_table,
@ -226,7 +218,6 @@ struct TypeChecker<'a, 'tcx> {
user_type_annotations: &'a CanonicalUserTypeAnnotations<'tcx>,
region_bound_pairs: RegionBoundPairs<'tcx>,
known_type_outlives_obligations: Vec<ty::PolyTypeOutlivesPredicate<'tcx>>,
implicit_region_bound: ty::Region<'tcx>,
reported_errors: FxIndexSet<(Ty<'tcx>, Span)>,
universal_regions: &'a UniversalRegions<'tcx>,
location_table: &'a PoloniusLocationTable,
@ -422,7 +413,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
self.infcx,
self.universal_regions,
&self.region_bound_pairs,
self.implicit_region_bound,
self.infcx.param_env,
&self.known_type_outlives_obligations,
locations,
@ -2507,7 +2497,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
self.infcx,
self.universal_regions,
&self.region_bound_pairs,
self.implicit_region_bound,
self.infcx.param_env,
&self.known_type_outlives_obligations,
locations,

View file

@ -438,6 +438,10 @@ impl<'tcx> UniversalRegions<'tcx> {
}
}
pub(crate) fn implicit_region_bound(&self) -> RegionVid {
self.fr_fn_body
}
pub(crate) fn tainted_by_errors(&self) -> Option<ErrorGuaranteed> {
self.indices.tainted_by_errors.get()
}

View file

@ -231,8 +231,6 @@ builtin_macros_format_unused_args = multiple unused formatting arguments
builtin_macros_format_use_positional = consider using a positional formatting argument instead
builtin_macros_invalid_crate_attribute = invalid crate attribute
builtin_macros_multiple_default_attrs = multiple `#[default]` attributes
.note = only one `#[default]` attribute is needed
.label = `#[default]` used here

View file

@ -1,44 +1,37 @@
//! Attributes injected into the crate root from command line using `-Z crate-attr`.
use rustc_ast::attr::mk_attr;
use rustc_ast::{self as ast, AttrItem, AttrStyle, token};
use rustc_parse::parser::ForceCollect;
use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_ast::{self as ast};
use rustc_errors::Diag;
use rustc_parse::parser::attr::InnerAttrPolicy;
use rustc_parse::{parse_in, source_str_to_stream};
use rustc_session::parse::ParseSess;
use rustc_span::FileName;
use crate::errors;
pub fn inject(krate: &mut ast::Crate, psess: &ParseSess, attrs: &[String]) {
for raw_attr in attrs {
let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
psess,
FileName::cli_crate_attr_source_code(raw_attr),
raw_attr.clone(),
));
let start_span = parser.token.span;
let AttrItem { unsafety, path, args, tokens: _ } =
match parser.parse_attr_item(ForceCollect::No) {
Ok(ai) => ai,
Err(err) => {
let source = format!("#![{raw_attr}]");
let parse = || -> Result<ast::Attribute, Vec<Diag<'_>>> {
let tokens = source_str_to_stream(
psess,
FileName::cli_crate_attr_source_code(raw_attr),
source,
None,
)?;
parse_in(psess, tokens, "<crate attribute>", |p| {
p.parse_attribute(InnerAttrPolicy::Permitted)
})
.map_err(|e| vec![e])
};
let meta = match parse() {
Ok(meta) => meta,
Err(errs) => {
for err in errs {
err.emit();
continue;
}
};
let end_span = parser.token.span;
if parser.token != token::Eof {
psess.dcx().emit_err(errors::InvalidCrateAttr { span: start_span.to(end_span) });
continue;
}
continue;
}
};
krate.attrs.push(mk_attr(
&psess.attr_id_generator,
AttrStyle::Inner,
unsafety,
path,
args,
start_span.to(end_span),
));
krate.attrs.push(meta);
}
}

View file

@ -109,13 +109,6 @@ pub(crate) struct ProcMacro {
pub(crate) span: Span,
}
#[derive(Diagnostic)]
#[diag(builtin_macros_invalid_crate_attribute)]
pub(crate) struct InvalidCrateAttr {
#[primary_span]
pub(crate) span: Span,
}
#[derive(Diagnostic)]
#[diag(builtin_macros_non_abi)]
pub(crate) struct NonABI {

View file

@ -1,13 +1,13 @@
use std::alloc::Allocator;
#[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")]
// This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()`
// is true. These types can be wrapped in a `FromDyn` to get a `Send` type. Wrapping a
// `Send` type in `IntoDynSyncSend` will create a `DynSend` type.
pub unsafe auto trait DynSend {}
#[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSync`. \
#[diagnostic::on_unimplemented(message = "`{Self}` doesn't implement `DynSync`. \
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`")]
// This is an auto trait for types which can be shared across threads if `sync::is_dyn_thread_safe()`
// is true. These types can be wrapped in a `FromDyn` to get a `Sync` type. Wrapping a

View file

@ -5,7 +5,7 @@ Erroneous code example:
```compile_fail,E0755
#![feature(ffi_pure)]
#[ffi_pure] // error!
#[unsafe(ffi_pure)] // error!
pub fn foo() {}
# fn main() {}
```
@ -17,7 +17,7 @@ side effects or infinite loops:
#![feature(ffi_pure)]
extern "C" {
#[ffi_pure] // ok!
#[unsafe(ffi_pure)] // ok!
pub fn strlen(s: *const i8) -> isize;
}
# fn main() {}

View file

@ -6,7 +6,7 @@ Erroneous code example:
```compile_fail,E0756
#![feature(ffi_const)]
#[ffi_const] // error!
#[unsafe(ffi_const)] // error!
pub fn foo() {}
# fn main() {}
```
@ -18,7 +18,7 @@ which have no side effects except for their return value:
#![feature(ffi_const)]
extern "C" {
#[ffi_const] // ok!
#[unsafe(ffi_const)] // ok!
pub fn strlen(s: *const i8) -> i32;
}
# fn main() {}

View file

@ -6,8 +6,9 @@ Erroneous code example:
#![feature(ffi_const, ffi_pure)]
extern "C" {
#[ffi_const]
#[ffi_pure] // error: `#[ffi_const]` function cannot be `#[ffi_pure]`
#[unsafe(ffi_const)]
#[unsafe(ffi_pure)]
//~^ ERROR `#[ffi_const]` function cannot be `#[ffi_pure]`
pub fn square(num: i32) -> i32;
}
```
@ -19,7 +20,7 @@ As `ffi_const` provides stronger guarantees than `ffi_pure`, remove the
#![feature(ffi_const)]
extern "C" {
#[ffi_const]
#[unsafe(ffi_const)]
pub fn square(num: i32) -> i32;
}
```

View file

@ -6,6 +6,7 @@ use AttributeDuplicates::*;
use AttributeGate::*;
use AttributeType::*;
use rustc_data_structures::fx::FxHashMap;
use rustc_span::edition::Edition;
use rustc_span::{Symbol, sym};
use crate::{Features, Stability};
@ -65,9 +66,12 @@ pub enum AttributeSafety {
/// Normal attribute that does not need `#[unsafe(...)]`
Normal,
/// Unsafe attribute that requires safety obligations
/// to be discharged
Unsafe,
/// Unsafe attribute that requires safety obligations to be discharged.
///
/// An error is emitted when `#[unsafe(...)]` is omitted, except when the attribute's edition
/// is less than the one stored in `unsafe_since`. This handles attributes that were safe in
/// earlier editions, but become unsafe in later ones.
Unsafe { unsafe_since: Option<Edition> },
}
#[derive(Clone, Copy)]
@ -187,12 +191,23 @@ macro_rules! template {
}
macro_rules! ungated {
(unsafe($edition:ident) $attr:ident, $typ:expr, $tpl:expr, $duplicates:expr, $encode_cross_crate:expr $(,)?) => {
BuiltinAttribute {
name: sym::$attr,
encode_cross_crate: $encode_cross_crate,
type_: $typ,
safety: AttributeSafety::Unsafe { unsafe_since: Some(Edition::$edition) },
template: $tpl,
gate: Ungated,
duplicates: $duplicates,
}
};
(unsafe $attr:ident, $typ:expr, $tpl:expr, $duplicates:expr, $encode_cross_crate:expr $(,)?) => {
BuiltinAttribute {
name: sym::$attr,
encode_cross_crate: $encode_cross_crate,
type_: $typ,
safety: AttributeSafety::Unsafe,
safety: AttributeSafety::Unsafe { unsafe_since: None },
template: $tpl,
gate: Ungated,
duplicates: $duplicates,
@ -217,7 +232,7 @@ macro_rules! gated {
name: sym::$attr,
encode_cross_crate: $encode_cross_crate,
type_: $typ,
safety: AttributeSafety::Unsafe,
safety: AttributeSafety::Unsafe { unsafe_since: None },
template: $tpl,
duplicates: $duplicates,
gate: Gated(Stability::Unstable, sym::$gate, $msg, Features::$gate),
@ -228,7 +243,7 @@ macro_rules! gated {
name: sym::$attr,
encode_cross_crate: $encode_cross_crate,
type_: $typ,
safety: AttributeSafety::Unsafe,
safety: AttributeSafety::Unsafe { unsafe_since: None },
template: $tpl,
duplicates: $duplicates,
gate: Gated(Stability::Unstable, sym::$attr, $msg, Features::$attr),
@ -423,9 +438,9 @@ pub static BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
),
ungated!(no_link, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No),
ungated!(repr, Normal, template!(List: "C"), DuplicatesOk, EncodeCrossCrate::No),
ungated!(unsafe export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No),
ungated!(unsafe link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No),
ungated!(unsafe no_mangle, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No),
ungated!(unsafe(Edition2024) export_name, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No),
ungated!(unsafe(Edition2024) link_section, Normal, template!(NameValueStr: "name"), FutureWarnPreceding, EncodeCrossCrate::No),
ungated!(unsafe(Edition2024) no_mangle, Normal, template!(Word), WarnFollowing, EncodeCrossCrate::No),
ungated!(used, Normal, template!(Word, List: "compiler|linker"), WarnFollowing, EncodeCrossCrate::No),
ungated!(link_ordinal, Normal, template!(List: "ordinal"), ErrorPreceding, EncodeCrossCrate::Yes),

View file

@ -89,7 +89,6 @@ macro_rules! arena_types {
[] name_set: rustc_data_structures::unord::UnordSet<rustc_span::Symbol>,
[] autodiff_item: rustc_ast::expand::autodiff_attrs::AutoDiffItem,
[] ordered_name_set: rustc_data_structures::fx::FxIndexSet<rustc_span::Symbol>,
[] pats: rustc_middle::ty::PatternKind<'tcx>,
[] valtree: rustc_middle::ty::ValTreeKind<'tcx>,
// Note that this deliberately duplicates items in the `rustc_hir::arena`,

View file

@ -3,6 +3,7 @@ use std::borrow::Cow;
use rustc_data_structures::intern::Interned;
use rustc_error_messages::MultiSpan;
use rustc_macros::HashStable;
use rustc_type_ir::walk::TypeWalker;
use rustc_type_ir::{self as ir, TypeFlags, WithCachedTypeInfo};
use crate::ty::{self, Ty, TyCtxt};
@ -243,4 +244,18 @@ impl<'tcx> Const<'tcx> {
pub fn is_ct_infer(self) -> bool {
matches!(self.kind(), ty::ConstKind::Infer(_))
}
/// Iterator that walks `self` and any types reachable from
/// `self`, in depth-first order. Note that just walks the types
/// that appear in `self`, it does not descend into the fields of
/// structs or variants. For example:
///
/// ```text
/// isize => { isize }
/// Foo<Bar<isize>> => { Foo<Bar<isize>>, Bar<isize>, isize }
/// [isize] => { [isize], isize }
/// ```
pub fn walk(self) -> TypeWalker<TyCtxt<'tcx>> {
TypeWalker::new(self.into())
}
}

View file

@ -870,7 +870,7 @@ impl<'tcx> CtxtInterners<'tcx> {
Ty(Interned::new_unchecked(
self.type_
.intern(kind, |kind| {
let flags = super::flags::FlagComputation::for_kind(&kind);
let flags = ty::FlagComputation::<TyCtxt<'tcx>>::for_kind(&kind);
let stable_hash = self.stable_hash(&flags, sess, untracked, &kind);
InternedInSet(self.arena.alloc(WithCachedTypeInfo {
@ -896,7 +896,7 @@ impl<'tcx> CtxtInterners<'tcx> {
Const(Interned::new_unchecked(
self.const_
.intern(kind, |kind: ty::ConstKind<'_>| {
let flags = super::flags::FlagComputation::for_const_kind(&kind);
let flags = ty::FlagComputation::<TyCtxt<'tcx>>::for_const_kind(&kind);
let stable_hash = self.stable_hash(&flags, sess, untracked, &kind);
InternedInSet(self.arena.alloc(WithCachedTypeInfo {
@ -912,7 +912,7 @@ impl<'tcx> CtxtInterners<'tcx> {
fn stable_hash<'a, T: HashStable<StableHashingContext<'a>>>(
&self,
flags: &ty::flags::FlagComputation,
flags: &ty::FlagComputation<TyCtxt<'tcx>>,
sess: &'a Session,
untracked: &'a Untracked,
val: &T,
@ -940,7 +940,7 @@ impl<'tcx> CtxtInterners<'tcx> {
Predicate(Interned::new_unchecked(
self.predicate
.intern(kind, |kind| {
let flags = super::flags::FlagComputation::for_predicate(kind);
let flags = ty::FlagComputation::<TyCtxt<'tcx>>::for_predicate(kind);
let stable_hash = self.stable_hash(&flags, sess, untracked, &kind);
@ -961,7 +961,7 @@ impl<'tcx> CtxtInterners<'tcx> {
} else {
self.clauses
.intern_ref(clauses, || {
let flags = super::flags::FlagComputation::for_clauses(clauses);
let flags = ty::FlagComputation::<TyCtxt<'tcx>>::for_clauses(clauses);
InternedInSet(ListWithCachedTypeInfo::from_arena(
&*self.arena,

View file

@ -1,359 +0,0 @@
use std::slice;
use crate::ty::{self, GenericArg, GenericArgKind, InferConst, Ty, TypeFlags};
/// Accumulator used while interning a type/const/predicate: walks a kind and
/// collects the `TypeFlags` of everything reachable from it, plus the
/// outermost exclusive binder depth, so both can be cached on the interned value.
#[derive(Debug)]
pub struct FlagComputation {
    /// Union of the flags of all components visited so far.
    pub flags: TypeFlags,
    /// see `Ty::outer_exclusive_binder` for details
    pub outer_exclusive_binder: ty::DebruijnIndex,
}
impl FlagComputation {
    /// Starts an empty computation: no flags, innermost binder depth.
    fn new() -> FlagComputation {
        FlagComputation { flags: TypeFlags::empty(), outer_exclusive_binder: ty::INNERMOST }
    }

    /// Computes the flags for a type kind about to be interned.
    #[allow(rustc::usage_of_ty_tykind)]
    pub fn for_kind(kind: &ty::TyKind<'_>) -> FlagComputation {
        let mut result = FlagComputation::new();
        result.add_kind(kind);
        result
    }

    /// Computes the flags for a (possibly binder-wrapped) predicate kind.
    pub fn for_predicate(binder: ty::Binder<'_, ty::PredicateKind<'_>>) -> FlagComputation {
        let mut result = FlagComputation::new();
        result.add_predicate(binder);
        result
    }

    /// Computes the flags for a const kind about to be interned.
    pub fn for_const_kind(kind: &ty::ConstKind<'_>) -> FlagComputation {
        let mut result = FlagComputation::new();
        result.add_const_kind(kind);
        result
    }

    /// Computes the combined flags of a clause list by unioning the
    /// already-cached flags/binder depth of each clause's predicate.
    pub fn for_clauses(clauses: &[ty::Clause<'_>]) -> FlagComputation {
        let mut result = FlagComputation::new();
        for c in clauses {
            result.add_flags(c.as_predicate().flags());
            result.add_exclusive_binder(c.as_predicate().outer_exclusive_binder());
        }
        result
    }

    /// Unions `flags` into the accumulated flag set.
    fn add_flags(&mut self, flags: TypeFlags) {
        self.flags = self.flags | flags;
    }

    /// indicates that `self` refers to something at binding level `binder`
    fn add_bound_var(&mut self, binder: ty::DebruijnIndex) {
        let exclusive_binder = binder.shifted_in(1);
        self.add_exclusive_binder(exclusive_binder);
    }

    /// indicates that `self` refers to something *inside* binding
    /// level `binder` -- not bound by `binder`, but bound by the next
    /// binder internal to it
    fn add_exclusive_binder(&mut self, exclusive_binder: ty::DebruijnIndex) {
        self.outer_exclusive_binder = self.outer_exclusive_binder.max(exclusive_binder);
    }

    /// Adds the flags/depth from a set of types that appear within the current type, but within a
    /// region binder.
    fn bound_computation<T, F>(&mut self, value: ty::Binder<'_, T>, f: F)
    where
        F: FnOnce(&mut Self, T),
    {
        // Run `f` in a nested computation so the binder-relative depth can be
        // adjusted before merging into `self`.
        let mut computation = FlagComputation::new();

        if !value.bound_vars().is_empty() {
            computation.add_flags(TypeFlags::HAS_BINDER_VARS);
        }

        f(&mut computation, value.skip_binder());

        self.add_flags(computation.flags);

        // The types that contributed to `computation` occurred within
        // a region binder, so subtract one from the region depth
        // within when adding the depth to `self`.
        let outer_exclusive_binder = computation.outer_exclusive_binder;
        if outer_exclusive_binder > ty::INNERMOST {
            self.add_exclusive_binder(outer_exclusive_binder.shifted_out(1));
        } // otherwise, this binder captures nothing
    }

    /// Walks one type kind and records the flags contributed by each
    /// constituent (params, inference vars, bound vars, regions, consts, ...).
    #[allow(rustc::usage_of_ty_tykind)]
    fn add_kind(&mut self, kind: &ty::TyKind<'_>) {
        match kind {
            // Leaf types contribute no flags.
            &ty::Bool
            | &ty::Char
            | &ty::Int(_)
            | &ty::Float(_)
            | &ty::Uint(_)
            | &ty::Never
            | &ty::Str
            | &ty::Foreign(..) => {}

            &ty::Error(_) => self.add_flags(TypeFlags::HAS_ERROR),

            &ty::Param(_) => {
                self.add_flags(TypeFlags::HAS_TY_PARAM);
            }

            &ty::Closure(_, args)
            | &ty::Coroutine(_, args)
            | &ty::CoroutineClosure(_, args)
            | &ty::CoroutineWitness(_, args) => {
                self.add_args(args);
            }

            &ty::Bound(debruijn, _) => {
                self.add_bound_var(debruijn);
                self.add_flags(TypeFlags::HAS_TY_BOUND);
            }

            &ty::Placeholder(..) => {
                self.add_flags(TypeFlags::HAS_TY_PLACEHOLDER);
            }

            &ty::Infer(infer) => match infer {
                ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) => {
                    self.add_flags(TypeFlags::HAS_TY_FRESH)
                }

                ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => {
                    self.add_flags(TypeFlags::HAS_TY_INFER)
                }
            },

            &ty::Adt(_, args) => {
                self.add_args(args);
            }

            &ty::Alias(kind, data) => {
                // Record which alias flavor this is, then descend into its args.
                self.add_flags(match kind {
                    ty::Projection => TypeFlags::HAS_TY_PROJECTION,
                    ty::Weak => TypeFlags::HAS_TY_WEAK,
                    ty::Opaque => TypeFlags::HAS_TY_OPAQUE,
                    ty::Inherent => TypeFlags::HAS_TY_INHERENT,
                });

                self.add_alias_ty(data);
            }

            &ty::Dynamic(obj, r, _) => {
                // Each existential predicate sits under its own binder.
                for predicate in obj.iter() {
                    self.bound_computation(predicate, |computation, predicate| match predicate {
                        ty::ExistentialPredicate::Trait(tr) => computation.add_args(tr.args),
                        ty::ExistentialPredicate::Projection(p) => {
                            computation.add_existential_projection(&p);
                        }
                        ty::ExistentialPredicate::AutoTrait(_) => {}
                    });
                }

                self.add_region(r);
            }

            &ty::Array(tt, len) => {
                self.add_ty(tt);
                self.add_const(len);
            }

            &ty::Pat(ty, pat) => {
                self.add_ty(ty);
                match *pat {
                    ty::PatternKind::Range { start, end } => {
                        self.add_const(start);
                        self.add_const(end);
                    }
                }
            }

            &ty::Slice(tt) => self.add_ty(tt),

            &ty::RawPtr(ty, _) => {
                self.add_ty(ty);
            }

            &ty::Ref(r, ty, _) => {
                self.add_region(r);
                self.add_ty(ty);
            }

            &ty::Tuple(types) => {
                self.add_tys(types);
            }

            &ty::FnDef(_, args) => {
                self.add_args(args);
            }

            &ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| {
                computation.add_tys(sig_tys.inputs_and_output);
            }),

            &ty::UnsafeBinder(bound_ty) => {
                self.bound_computation(bound_ty.into(), |computation, ty| {
                    computation.add_ty(ty);
                })
            }
        }
    }

    /// Adds the flags of a binder-wrapped predicate kind.
    fn add_predicate(&mut self, binder: ty::Binder<'_, ty::PredicateKind<'_>>) {
        self.bound_computation(binder, |computation, atom| computation.add_predicate_atom(atom));
    }

    /// Adds the flags contributed by one predicate atom (the part inside the binder).
    fn add_predicate_atom(&mut self, atom: ty::PredicateKind<'_>) {
        match atom {
            ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => {
                self.add_args(trait_pred.trait_ref.args);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(ty::HostEffectPredicate {
                trait_ref,
                constness: _,
            })) => {
                self.add_args(trait_ref.args);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(
                a,
                b,
            ))) => {
                self.add_region(a);
                self.add_region(b);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(
                ty,
                region,
            ))) => {
                self.add_ty(ty);
                self.add_region(region);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => {
                self.add_const(ct);
                self.add_ty(ty);
            }
            ty::PredicateKind::Subtype(ty::SubtypePredicate { a_is_expected: _, a, b }) => {
                self.add_ty(a);
                self.add_ty(b);
            }
            ty::PredicateKind::Coerce(ty::CoercePredicate { a, b }) => {
                self.add_ty(a);
                self.add_ty(b);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate {
                projection_term,
                term,
            })) => {
                self.add_alias_term(projection_term);
                self.add_term(term);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => {
                self.add_args(slice::from_ref(&arg));
            }
            ty::PredicateKind::DynCompatible(_def_id) => {}
            ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(uv)) => {
                self.add_const(uv);
            }
            ty::PredicateKind::ConstEquate(expected, found) => {
                self.add_const(expected);
                self.add_const(found);
            }
            ty::PredicateKind::Ambiguous => {}
            ty::PredicateKind::NormalizesTo(ty::NormalizesTo { alias, term }) => {
                self.add_alias_term(alias);
                self.add_term(term);
            }
            ty::PredicateKind::AliasRelate(t1, t2, _) => {
                self.add_term(t1);
                self.add_term(t2);
            }
        }
    }

    /// Adds a type's already-cached flags and binder depth.
    fn add_ty(&mut self, ty: Ty<'_>) {
        self.add_flags(ty.flags());
        self.add_exclusive_binder(ty.outer_exclusive_binder());
    }

    /// Adds the flags of every type in the slice.
    fn add_tys(&mut self, tys: &[Ty<'_>]) {
        for &ty in tys {
            self.add_ty(ty);
        }
    }

    /// Adds a region's flags; a bound region also raises the binder depth.
    fn add_region(&mut self, r: ty::Region<'_>) {
        self.add_flags(r.type_flags());
        if let ty::ReBound(debruijn, _) = r.kind() {
            self.add_bound_var(debruijn);
        }
    }

    /// Adds a const's already-cached flags and binder depth.
    fn add_const(&mut self, c: ty::Const<'_>) {
        self.add_flags(c.flags());
        self.add_exclusive_binder(c.outer_exclusive_binder());
    }

    /// Walks one const kind and records the flags of its constituents.
    fn add_const_kind(&mut self, c: &ty::ConstKind<'_>) {
        match *c {
            ty::ConstKind::Unevaluated(uv) => {
                self.add_args(uv.args);
                self.add_flags(TypeFlags::HAS_CT_PROJECTION);
            }
            ty::ConstKind::Infer(infer) => match infer {
                InferConst::Fresh(_) => self.add_flags(TypeFlags::HAS_CT_FRESH),
                InferConst::Var(_) => self.add_flags(TypeFlags::HAS_CT_INFER),
            },
            ty::ConstKind::Bound(debruijn, _) => {
                self.add_bound_var(debruijn);
                self.add_flags(TypeFlags::HAS_CT_BOUND);
            }
            ty::ConstKind::Param(_) => {
                self.add_flags(TypeFlags::HAS_CT_PARAM);
            }
            ty::ConstKind::Placeholder(_) => {
                self.add_flags(TypeFlags::HAS_CT_PLACEHOLDER);
            }
            ty::ConstKind::Value(cv) => self.add_ty(cv.ty),
            ty::ConstKind::Expr(e) => self.add_args(e.args()),
            ty::ConstKind::Error(_) => self.add_flags(TypeFlags::HAS_ERROR),
        }
    }

    /// Adds an existential projection's args and its term (type or const).
    fn add_existential_projection(&mut self, projection: &ty::ExistentialProjection<'_>) {
        self.add_args(projection.args);
        match projection.term.unpack() {
            ty::TermKind::Ty(ty) => self.add_ty(ty),
            ty::TermKind::Const(ct) => self.add_const(ct),
        }
    }

    /// Adds the args of an alias type (projection/opaque/weak/inherent).
    fn add_alias_ty(&mut self, alias_ty: ty::AliasTy<'_>) {
        self.add_args(alias_ty.args);
    }

    /// Adds the args of an alias term.
    fn add_alias_term(&mut self, alias_term: ty::AliasTerm<'_>) {
        self.add_args(alias_term.args);
    }

    /// Adds each generic argument, dispatching on type/lifetime/const.
    fn add_args(&mut self, args: &[GenericArg<'_>]) {
        for kind in args {
            match kind.unpack() {
                GenericArgKind::Type(ty) => self.add_ty(ty),
                GenericArgKind::Lifetime(lt) => self.add_region(lt),
                GenericArgKind::Const(ct) => self.add_const(ct),
            }
        }
    }

    /// Adds a term, dispatching on whether it is a type or a const.
    fn add_term(&mut self, term: ty::Term<'_>) {
        match term.unpack() {
            ty::TermKind::Ty(ty) => self.add_ty(ty),
            ty::TermKind::Const(ct) => self.add_const(ct),
        }
    }
}

View file

@ -11,6 +11,7 @@ use rustc_hir::def_id::DefId;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, extension};
use rustc_serialize::{Decodable, Encodable};
use rustc_type_ir::WithCachedTypeInfo;
use rustc_type_ir::walk::TypeWalker;
use smallvec::SmallVec;
use crate::ty::codec::{TyDecoder, TyEncoder};
@ -297,6 +298,20 @@ impl<'tcx> GenericArg<'tcx> {
GenericArgKind::Const(ct) => ct.is_ct_infer(),
}
}
/// Iterator that walks `self` and any types reachable from
/// `self`, in depth-first order. Note that just walks the types
/// that appear in `self`, it does not descend into the fields of
/// structs or variants. For example:
///
/// ```text
/// isize => { isize }
/// Foo<Bar<isize>> => { Foo<Bar<isize>>, Bar<isize>, isize }
/// [isize] => { [isize], isize }
/// ```
pub fn walk(self) -> TypeWalker<TyCtxt<'tcx>> {
TypeWalker::new(self)
}
}
impl<'a, 'tcx> Lift<TyCtxt<'tcx>> for GenericArg<'a> {

View file

@ -7,9 +7,9 @@ use std::{fmt, iter, mem, ptr, slice};
use rustc_data_structures::aligned::{Aligned, align_of};
use rustc_data_structures::sync::DynSync;
use rustc_serialize::{Encodable, Encoder};
use rustc_type_ir::FlagComputation;
use super::flags::FlagComputation;
use super::{DebruijnIndex, TypeFlags};
use super::{DebruijnIndex, TyCtxt, TypeFlags};
use crate::arena::Arena;
/// `List<T>` is a bit like `&[T]`, but with some critical differences.
@ -299,8 +299,8 @@ impl TypeInfo {
}
}
impl From<FlagComputation> for TypeInfo {
fn from(computation: FlagComputation) -> TypeInfo {
impl<'tcx> From<FlagComputation<TyCtxt<'tcx>>> for TypeInfo {
fn from(computation: FlagComputation<TyCtxt<'tcx>>) -> TypeInfo {
TypeInfo {
flags: computation.flags,
outer_exclusive_binder: computation.outer_exclusive_binder,

View file

@ -117,7 +117,6 @@ pub mod cast;
pub mod codec;
pub mod error;
pub mod fast_reject;
pub mod flags;
pub mod inhabitedness;
pub mod layout;
pub mod normalize_erasing_regions;
@ -128,7 +127,6 @@ pub mod significant_drop_order;
pub mod trait_def;
pub mod util;
pub mod vtable;
pub mod walk;
mod adt;
mod assoc;

View file

@ -1,14 +1,40 @@
use std::fmt;
use rustc_data_structures::intern::Interned;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
use rustc_macros::HashStable;
use rustc_type_ir::ir_print::IrPrint;
use rustc_type_ir::{
FlagComputation, Flags, {self as ir},
};
use super::TyCtxt;
use crate::ty;
pub type PatternKind<'tcx> = ir::PatternKind<TyCtxt<'tcx>>;
/// An interned pattern type's pattern (see `ty::Pat`); a thin wrapper around
/// an interned `PatternKind`, cheap to copy and compare by pointer.
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct Pattern<'tcx>(pub Interned<'tcx, PatternKind<'tcx>>);
impl<'tcx> Flags for Pattern<'tcx> {
    /// Flags of a pattern are the union of the flags of its constituent
    /// consts (for a range pattern: the start and end bounds).
    fn flags(&self) -> rustc_type_ir::TypeFlags {
        match &**self {
            ty::PatternKind::Range { start, end } => {
                FlagComputation::for_const_kind(&start.kind()).flags
                    | FlagComputation::for_const_kind(&end.kind()).flags
            }
        }
    }

    /// The outermost exclusive binder depth is the max over the pattern's
    /// constituent consts.
    fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex {
        match &**self {
            ty::PatternKind::Range { start, end } => {
                start.outer_exclusive_binder().max(end.outer_exclusive_binder())
            }
        }
    }
}
impl<'tcx> std::ops::Deref for Pattern<'tcx> {
type Target = PatternKind<'tcx>;
@ -23,9 +49,9 @@ impl<'tcx> fmt::Debug for Pattern<'tcx> {
}
}
impl<'tcx> fmt::Debug for PatternKind<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
impl<'tcx> IrPrint<PatternKind<'tcx>> for TyCtxt<'tcx> {
fn print(t: &PatternKind<'tcx>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *t {
PatternKind::Range { start, end } => {
write!(f, "{start}")?;
@ -53,10 +79,15 @@ impl<'tcx> fmt::Debug for PatternKind<'tcx> {
}
}
}
fn print_debug(t: &PatternKind<'tcx>, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
Self::print(t, fmt)
}
}
#[derive(Clone, PartialEq, Eq, Hash)]
#[derive(HashStable, TyEncodable, TyDecodable, TypeVisitable, TypeFoldable)]
pub enum PatternKind<'tcx> {
Range { start: ty::Const<'tcx>, end: ty::Const<'tcx> },
impl<'tcx> rustc_type_ir::inherent::IntoKind for Pattern<'tcx> {
    type Kind = PatternKind<'tcx>;

    /// Returns the underlying `PatternKind` by dereferencing the interned value.
    fn kind(self) -> Self::Kind {
        *self
    }
}

View file

@ -16,6 +16,7 @@ use rustc_hir::def_id::DefId;
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, extension};
use rustc_span::{DUMMY_SP, Span, Symbol, sym};
use rustc_type_ir::TyKind::*;
use rustc_type_ir::walk::TypeWalker;
use rustc_type_ir::{self as ir, BoundVar, CollectAndApply, DynKind, TypeVisitableExt, elaborate};
use tracing::instrument;
use ty::util::{AsyncDropGlueMorphology, IntTypeExt};
@ -2029,6 +2030,20 @@ impl<'tcx> Ty<'tcx> {
pub fn is_known_rigid(self) -> bool {
self.kind().is_known_rigid()
}
/// Iterator that walks `self` and any types reachable from
/// `self`, in depth-first order. Note that just walks the types
/// that appear in `self`, it does not descend into the fields of
/// structs or variants. For example:
///
/// ```text
/// isize => { isize }
/// Foo<Bar<isize>> => { Foo<Bar<isize>>, Bar<isize>, isize }
/// [isize] => { [isize], isize }
/// ```
pub fn walk(self) -> TypeWalker<TyCtxt<'tcx>> {
TypeWalker::new(self.into())
}
}
impl<'tcx> rustc_type_ir::inherent::Tys<TyCtxt<'tcx>> for &'tcx ty::List<Ty<'tcx>> {

View file

@ -1169,6 +1169,13 @@ fn create_coroutine_drop_shim<'tcx>(
dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(()));
body.source.instance = drop_instance;
// Creating a coroutine drop shim happens on `Analysis(PostCleanup) -> Runtime(Initial)`
// but the pass manager doesn't update the phase of the coroutine drop shim. Update the
// phase of the drop shim so that later on when we run the pass manager on the shim, in
// the `mir_shims` query, we don't ICE on the intra-pass validation before we've updated
// the phase of the body from analysis.
body.phase = MirPhase::Runtime(RuntimePhase::Initial);
body
}

View file

@ -157,7 +157,7 @@ fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaIte
pub fn check_attribute_safety(psess: &ParseSess, safety: AttributeSafety, attr: &Attribute) {
let attr_item = attr.get_normal_item();
if safety == AttributeSafety::Unsafe {
if let AttributeSafety::Unsafe { unsafe_since } = safety {
if let ast::Safety::Default = attr_item.unsafety {
let path_span = attr_item.path.span;
@ -167,7 +167,13 @@ pub fn check_attribute_safety(psess: &ParseSess, safety: AttributeSafety, attr:
// square bracket respectively.
let diag_span = attr_item.span();
if attr.span.at_least_rust_2024() {
// Attributes can be safe in earlier editions, and become unsafe in later ones.
let emit_error = match unsafe_since {
None => true,
Some(unsafe_since) => attr.span.edition() >= unsafe_since,
};
if emit_error {
psess.dcx().emit_err(errors::UnsafeAttrOutsideUnsafe {
span: path_span,
suggestion: errors::UnsafeAttrOutsideUnsafeSuggestion {

View file

@ -226,12 +226,12 @@ impl SerializedDepGraph {
// If the length of this node's edge list is small, the length is stored in the header.
// If it is not, we fall back to another decoder call.
let num_edges = node_header.len().unwrap_or_else(|| d.read_usize());
let num_edges = node_header.len().unwrap_or_else(|| d.read_u32());
// The edges index list uses the same varint strategy as rmeta tables; we select the
// number of byte elements per-array not per-element. This lets us read the whole edge
// list for a node with one decoder call and also use the on-disk format in memory.
let edges_len_bytes = node_header.bytes_per_index() * num_edges;
let edges_len_bytes = node_header.bytes_per_index() * (num_edges as usize);
// The in-memory structure for the edges list stores the byte width of the edges on
// this node with the offset into the global edge data array.
let edges_header = node_header.edges_header(&edge_list_data);
@ -296,7 +296,7 @@ struct SerializedNodeHeader<D> {
// The fields of a `SerializedNodeHeader`, this struct is an implementation detail and exists only
// to make the implementation of `SerializedNodeHeader` simpler.
struct Unpacked {
len: Option<usize>,
len: Option<u32>,
bytes_per_index: usize,
kind: DepKind,
hash: PackedFingerprint,
@ -352,7 +352,7 @@ impl<D: Deps> SerializedNodeHeader<D> {
assert_eq!(fingerprint, res.fingerprint());
assert_eq!(node, res.node());
if let Some(len) = res.len() {
assert_eq!(edge_count, len);
assert_eq!(edge_count, len as usize);
}
}
Self { bytes, _marker: PhantomData }
@ -366,7 +366,7 @@ impl<D: Deps> SerializedNodeHeader<D> {
let kind = head & mask(Self::KIND_BITS) as u16;
let bytes_per_index = (head >> Self::KIND_BITS) & mask(Self::WIDTH_BITS) as u16;
let len = (head as usize) >> (Self::WIDTH_BITS + Self::KIND_BITS);
let len = (head as u32) >> (Self::WIDTH_BITS + Self::KIND_BITS);
Unpacked {
len: len.checked_sub(1),
@ -378,7 +378,7 @@ impl<D: Deps> SerializedNodeHeader<D> {
}
#[inline]
fn len(&self) -> Option<usize> {
fn len(&self) -> Option<u32> {
self.unpack().len
}
@ -421,7 +421,8 @@ impl NodeInfo {
e.write_array(header.bytes);
if header.len().is_none() {
e.emit_usize(edges.len());
// The edges are all unique and the number of unique indices is less than u32::MAX.
e.emit_u32(edges.len().try_into().unwrap());
}
let bytes_per_index = header.bytes_per_index();
@ -456,7 +457,8 @@ impl NodeInfo {
e.write_array(header.bytes);
if header.len().is_none() {
e.emit_usize(edge_count);
// The edges are all unique and the number of unique indices is less than u32::MAX.
e.emit_u32(edge_count.try_into().unwrap());
}
let bytes_per_index = header.bytes_per_index();

View file

@ -268,8 +268,8 @@ trait_selection_oc_type_compat = type not compatible with trait
trait_selection_opaque_captures_lifetime = hidden type for `{$opaque_ty}` captures lifetime that does not appear in bounds
.label = opaque type defined here
trait_selection_opaque_type_non_generic_param =
expected generic {$kind} parameter, found `{$ty}`
.label = {STREQ($ty, "'static") ->
expected generic {$kind} parameter, found `{$arg}`
.label = {STREQ($arg, "'static") ->
[true] cannot use static lifetime; use a bound lifetime instead or remove the lifetime parameter from the opaque type
*[other] this generic parameter must be used with a generic {$kind} parameter
}

View file

@ -1926,7 +1926,7 @@ impl Subdiagnostic for AddPreciseCapturingForOvercapture {
#[derive(Diagnostic)]
#[diag(trait_selection_opaque_type_non_generic_param, code = E0792)]
pub(crate) struct NonGenericOpaqueTypeParam<'a, 'tcx> {
pub ty: GenericArg<'tcx>,
pub arg: GenericArg<'tcx>,
pub kind: &'a str,
#[primary_span]
pub span: Span,

View file

@ -70,7 +70,7 @@ pub fn check_opaque_type_parameter_valid<'tcx>(
opaque_env.param_is_error(i)?;
return Err(infcx.dcx().emit_err(NonGenericOpaqueTypeParam {
ty: arg,
arg,
kind,
span,
param_span: tcx.def_span(opaque_param.def_id),

View file

@ -1,3 +1,9 @@
use std::slice;
use crate::inherent::*;
use crate::visit::Flags;
use crate::{self as ty, Interner};
bitflags::bitflags! {
/// Flags that we track on types. These flags are propagated upwards
/// through the type during type construction, so that we can quickly check
@ -128,3 +134,362 @@ bitflags::bitflags! {
const HAS_BINDER_VARS = 1 << 23;
}
}
/// Accumulator that walks a type/const/predicate kind and collects the
/// `TypeFlags` of everything reachable from it, plus the outermost exclusive
/// binder depth. Generic over the `Interner` so it can be shared across
/// type-IR consumers; the interner is only a phantom — no interner state is held.
#[derive(Debug)]
pub struct FlagComputation<I> {
    /// Union of the flags of all components visited so far.
    pub flags: TypeFlags,

    /// see `Ty::outer_exclusive_binder` for details
    pub outer_exclusive_binder: ty::DebruijnIndex,

    interner: std::marker::PhantomData<I>,
}
impl<I: Interner> FlagComputation<I> {
    /// Starts an empty computation: no flags, innermost binder depth.
    fn new() -> FlagComputation<I> {
        FlagComputation {
            flags: TypeFlags::empty(),
            outer_exclusive_binder: ty::INNERMOST,
            interner: std::marker::PhantomData,
        }
    }

    /// Computes the flags for a type kind about to be interned.
    #[allow(rustc::usage_of_ty_tykind)]
    pub fn for_kind(kind: &ty::TyKind<I>) -> FlagComputation<I> {
        let mut result = FlagComputation::new();
        result.add_kind(kind);
        result
    }

    /// Computes the flags for a (possibly binder-wrapped) predicate kind.
    pub fn for_predicate(binder: ty::Binder<I, ty::PredicateKind<I>>) -> FlagComputation<I> {
        let mut result = FlagComputation::new();
        result.add_predicate(binder);
        result
    }

    /// Computes the flags for a const kind about to be interned.
    pub fn for_const_kind(kind: &ty::ConstKind<I>) -> FlagComputation<I> {
        let mut result = FlagComputation::new();
        result.add_const_kind(kind);
        result
    }

    /// Computes the combined flags of a clause list by unioning the
    /// already-cached flags/binder depth of each clause's predicate.
    pub fn for_clauses(clauses: &[I::Clause]) -> FlagComputation<I> {
        let mut result = FlagComputation::new();
        for c in clauses {
            result.add_flags(c.as_predicate().flags());
            result.add_exclusive_binder(c.as_predicate().outer_exclusive_binder());
        }
        result
    }

    /// Unions `flags` into the accumulated flag set.
    fn add_flags(&mut self, flags: TypeFlags) {
        self.flags = self.flags | flags;
    }

    /// indicates that `self` refers to something at binding level `binder`
    fn add_bound_var(&mut self, binder: ty::DebruijnIndex) {
        let exclusive_binder = binder.shifted_in(1);
        self.add_exclusive_binder(exclusive_binder);
    }

    /// indicates that `self` refers to something *inside* binding
    /// level `binder` -- not bound by `binder`, but bound by the next
    /// binder internal to it
    fn add_exclusive_binder(&mut self, exclusive_binder: ty::DebruijnIndex) {
        self.outer_exclusive_binder = self.outer_exclusive_binder.max(exclusive_binder);
    }

    /// Adds the flags/depth from a set of types that appear within the current type, but within a
    /// region binder.
    fn bound_computation<T, F>(&mut self, value: ty::Binder<I, T>, f: F)
    where
        F: FnOnce(&mut Self, T),
    {
        // Run `f` in a nested computation so the binder-relative depth can be
        // adjusted before merging into `self`.
        let mut computation = FlagComputation::new();

        if !value.bound_vars().is_empty() {
            computation.add_flags(TypeFlags::HAS_BINDER_VARS);
        }

        f(&mut computation, value.skip_binder());

        self.add_flags(computation.flags);

        // The types that contributed to `computation` occurred within
        // a region binder, so subtract one from the region depth
        // within when adding the depth to `self`.
        let outer_exclusive_binder = computation.outer_exclusive_binder;
        if outer_exclusive_binder > ty::INNERMOST {
            self.add_exclusive_binder(outer_exclusive_binder.shifted_out(1));
        } // otherwise, this binder captures nothing
    }

    /// Walks one type kind and records the flags contributed by each
    /// constituent (params, inference vars, bound vars, regions, consts, ...).
    #[allow(rustc::usage_of_ty_tykind)]
    fn add_kind(&mut self, kind: &ty::TyKind<I>) {
        match *kind {
            // Leaf types contribute no flags.
            ty::Bool
            | ty::Char
            | ty::Int(_)
            | ty::Float(_)
            | ty::Uint(_)
            | ty::Never
            | ty::Str
            | ty::Foreign(..) => {}

            ty::Error(_) => self.add_flags(TypeFlags::HAS_ERROR),

            ty::Param(_) => {
                self.add_flags(TypeFlags::HAS_TY_PARAM);
            }

            ty::Closure(_, args)
            | ty::Coroutine(_, args)
            | ty::CoroutineClosure(_, args)
            | ty::CoroutineWitness(_, args) => {
                self.add_args(args.as_slice());
            }

            ty::Bound(debruijn, _) => {
                self.add_bound_var(debruijn);
                self.add_flags(TypeFlags::HAS_TY_BOUND);
            }

            ty::Placeholder(..) => {
                self.add_flags(TypeFlags::HAS_TY_PLACEHOLDER);
            }

            ty::Infer(infer) => match infer {
                ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) => {
                    self.add_flags(TypeFlags::HAS_TY_FRESH)
                }

                ty::TyVar(_) | ty::IntVar(_) | ty::FloatVar(_) => {
                    self.add_flags(TypeFlags::HAS_TY_INFER)
                }
            },

            ty::Adt(_, args) => {
                self.add_args(args.as_slice());
            }

            ty::Alias(kind, data) => {
                // Record which alias flavor this is, then descend into its args.
                self.add_flags(match kind {
                    ty::Projection => TypeFlags::HAS_TY_PROJECTION,
                    ty::Weak => TypeFlags::HAS_TY_WEAK,
                    ty::Opaque => TypeFlags::HAS_TY_OPAQUE,
                    ty::Inherent => TypeFlags::HAS_TY_INHERENT,
                });

                self.add_alias_ty(data);
            }

            ty::Dynamic(obj, r, _) => {
                // Each existential predicate sits under its own binder.
                for predicate in obj.iter() {
                    self.bound_computation(predicate, |computation, predicate| match predicate {
                        ty::ExistentialPredicate::Trait(tr) => {
                            computation.add_args(tr.args.as_slice())
                        }
                        ty::ExistentialPredicate::Projection(p) => {
                            computation.add_existential_projection(&p);
                        }
                        ty::ExistentialPredicate::AutoTrait(_) => {}
                    });
                }

                self.add_region(r);
            }

            ty::Array(tt, len) => {
                self.add_ty(tt);
                self.add_const(len);
            }

            ty::Pat(ty, pat) => {
                self.add_ty(ty);
                // Pattern flags are precomputed via the `Flags` impl on `I::Pat`.
                self.add_flags(pat.flags());
            }

            ty::Slice(tt) => self.add_ty(tt),

            ty::RawPtr(ty, _) => {
                self.add_ty(ty);
            }

            ty::Ref(r, ty, _) => {
                self.add_region(r);
                self.add_ty(ty);
            }

            ty::Tuple(types) => {
                self.add_tys(types);
            }

            ty::FnDef(_, args) => {
                self.add_args(args.as_slice());
            }

            ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| {
                computation.add_tys(sig_tys.inputs_and_output);
            }),

            ty::UnsafeBinder(bound_ty) => {
                self.bound_computation(bound_ty.into(), |computation, ty| {
                    computation.add_ty(ty);
                })
            }
        }
    }

    /// Adds the flags of a binder-wrapped predicate kind.
    fn add_predicate(&mut self, binder: ty::Binder<I, ty::PredicateKind<I>>) {
        self.bound_computation(binder, |computation, atom| computation.add_predicate_atom(atom));
    }

    /// Adds the flags contributed by one predicate atom (the part inside the binder).
    fn add_predicate_atom(&mut self, atom: ty::PredicateKind<I>) {
        match atom {
            ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_pred)) => {
                self.add_args(trait_pred.trait_ref.args.as_slice());
            }
            ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(ty::HostEffectPredicate {
                trait_ref,
                constness: _,
            })) => {
                self.add_args(trait_ref.args.as_slice());
            }
            ty::PredicateKind::Clause(ty::ClauseKind::RegionOutlives(ty::OutlivesPredicate(
                a,
                b,
            ))) => {
                self.add_region(a);
                self.add_region(b);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(
                ty,
                region,
            ))) => {
                self.add_ty(ty);
                self.add_region(region);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::ConstArgHasType(ct, ty)) => {
                self.add_const(ct);
                self.add_ty(ty);
            }
            ty::PredicateKind::Subtype(ty::SubtypePredicate { a_is_expected: _, a, b }) => {
                self.add_ty(a);
                self.add_ty(b);
            }
            ty::PredicateKind::Coerce(ty::CoercePredicate { a, b }) => {
                self.add_ty(a);
                self.add_ty(b);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::Projection(ty::ProjectionPredicate {
                projection_term,
                term,
            })) => {
                self.add_alias_term(projection_term);
                self.add_term(term);
            }
            ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(arg)) => {
                self.add_args(slice::from_ref(&arg));
            }
            ty::PredicateKind::DynCompatible(_def_id) => {}
            ty::PredicateKind::Clause(ty::ClauseKind::ConstEvaluatable(uv)) => {
                self.add_const(uv);
            }
            ty::PredicateKind::ConstEquate(expected, found) => {
                self.add_const(expected);
                self.add_const(found);
            }
            ty::PredicateKind::Ambiguous => {}
            ty::PredicateKind::NormalizesTo(ty::NormalizesTo { alias, term }) => {
                self.add_alias_term(alias);
                self.add_term(term);
            }
            ty::PredicateKind::AliasRelate(t1, t2, _) => {
                self.add_term(t1);
                self.add_term(t2);
            }
        }
    }

    /// Adds a type's already-cached flags and binder depth.
    fn add_ty(&mut self, ty: I::Ty) {
        self.add_flags(ty.flags());
        self.add_exclusive_binder(ty.outer_exclusive_binder());
    }

    /// Adds the flags of every type in the list.
    fn add_tys(&mut self, tys: I::Tys) {
        for ty in tys.iter() {
            self.add_ty(ty);
        }
    }

    /// Adds a region's flags; a bound region also raises the binder depth.
    fn add_region(&mut self, r: I::Region) {
        self.add_flags(r.flags());
        if let ty::ReBound(debruijn, _) = r.kind() {
            self.add_bound_var(debruijn);
        }
    }

    /// Adds a const's already-cached flags and binder depth.
    fn add_const(&mut self, c: I::Const) {
        self.add_flags(c.flags());
        self.add_exclusive_binder(c.outer_exclusive_binder());
    }

    /// Walks one const kind and records the flags of its constituents.
    fn add_const_kind(&mut self, c: &ty::ConstKind<I>) {
        match *c {
            ty::ConstKind::Unevaluated(uv) => {
                self.add_args(uv.args.as_slice());
                self.add_flags(TypeFlags::HAS_CT_PROJECTION);
            }
            ty::ConstKind::Infer(infer) => match infer {
                ty::InferConst::Fresh(_) => self.add_flags(TypeFlags::HAS_CT_FRESH),
                ty::InferConst::Var(_) => self.add_flags(TypeFlags::HAS_CT_INFER),
            },
            ty::ConstKind::Bound(debruijn, _) => {
                self.add_bound_var(debruijn);
                self.add_flags(TypeFlags::HAS_CT_BOUND);
            }
            ty::ConstKind::Param(_) => {
                self.add_flags(TypeFlags::HAS_CT_PARAM);
            }
            ty::ConstKind::Placeholder(_) => {
                self.add_flags(TypeFlags::HAS_CT_PLACEHOLDER);
            }
            ty::ConstKind::Value(cv) => self.add_ty(cv.ty()),
            ty::ConstKind::Expr(e) => self.add_args(e.args().as_slice()),
            ty::ConstKind::Error(_) => self.add_flags(TypeFlags::HAS_ERROR),
        }
    }

    /// Adds an existential projection's args and its term (type or const).
    fn add_existential_projection(&mut self, projection: &ty::ExistentialProjection<I>) {
        self.add_args(projection.args.as_slice());
        match projection.term.kind() {
            ty::TermKind::Ty(ty) => self.add_ty(ty),
            ty::TermKind::Const(ct) => self.add_const(ct),
        }
    }

    /// Adds the args of an alias type (projection/opaque/weak/inherent).
    fn add_alias_ty(&mut self, alias_ty: ty::AliasTy<I>) {
        self.add_args(alias_ty.args.as_slice());
    }

    /// Adds the args of an alias term.
    fn add_alias_term(&mut self, alias_term: ty::AliasTerm<I>) {
        self.add_args(alias_term.args.as_slice());
    }

    /// Adds each generic argument, dispatching on type/lifetime/const.
    fn add_args(&mut self, args: &[I::GenericArg]) {
        for kind in args {
            match kind.kind() {
                ty::GenericArgKind::Type(ty) => self.add_ty(ty),
                ty::GenericArgKind::Lifetime(lt) => self.add_region(lt),
                ty::GenericArgKind::Const(ct) => self.add_const(ct),
            }
        }
    }

    /// Adds a term, dispatching on whether it is a type or a const.
    fn add_term(&mut self, term: I::Term) {
        match term.kind() {
            ty::TermKind::Ty(ty) => self.add_ty(ty),
            ty::TermKind::Const(ct) => self.add_const(ct),
        }
    }
}

View file

@ -583,7 +583,7 @@ pub trait Span<I: Interner>: Copy + Debug + Hash + Eq + TypeFoldable<I> {
pub trait SliceLike: Sized + Copy {
type Item: Copy;
type IntoIter: Iterator<Item = Self::Item>;
type IntoIter: Iterator<Item = Self::Item> + DoubleEndedIterator;
fn iter(self) -> Self::IntoIter;

View file

@ -31,6 +31,7 @@ pub trait Interner:
+ IrPrint<ty::SubtypePredicate<Self>>
+ IrPrint<ty::CoercePredicate<Self>>
+ IrPrint<ty::FnSig<Self>>
+ IrPrint<ty::PatternKind<Self>>
{
type DefId: DefId<Self>;
type LocalDefId: Copy + Debug + Hash + Eq + Into<Self::DefId> + TypeFoldable<Self>;
@ -104,7 +105,14 @@ pub trait Interner:
type ErrorGuaranteed: Copy + Debug + Hash + Eq;
type BoundExistentialPredicates: BoundExistentialPredicates<Self>;
type AllocId: Copy + Debug + Hash + Eq;
type Pat: Copy + Debug + Hash + Eq + Debug + Relate<Self>;
type Pat: Copy
+ Debug
+ Hash
+ Eq
+ Debug
+ Relate<Self>
+ Flags
+ IntoKind<Kind = ty::PatternKind<Self>>;
type Safety: Safety<Self>;
type Abi: Abi<Self>;

View file

@ -2,8 +2,8 @@ use std::fmt;
use crate::{
AliasTerm, AliasTy, Binder, CoercePredicate, ExistentialProjection, ExistentialTraitRef, FnSig,
HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, ProjectionPredicate,
SubtypePredicate, TraitPredicate, TraitRef,
HostEffectPredicate, Interner, NormalizesTo, OutlivesPredicate, PatternKind,
ProjectionPredicate, SubtypePredicate, TraitPredicate, TraitRef,
};
pub trait IrPrint<T> {
@ -57,9 +57,10 @@ define_display_via_print!(
AliasTy,
AliasTerm,
FnSig,
PatternKind,
);
define_debug_via_print!(TraitRef, ExistentialTraitRef, ExistentialProjection);
define_debug_via_print!(TraitRef, ExistentialTraitRef, ExistentialProjection, PatternKind);
impl<I: Interner, T> fmt::Display for OutlivesPredicate<I, T>
where

View file

@ -31,6 +31,7 @@ pub mod outlives;
pub mod relate;
pub mod search_graph;
pub mod solve;
pub mod walk;
// These modules are not `pub` since they are glob-imported.
#[macro_use]
@ -44,6 +45,7 @@ mod generic_arg;
mod infer_ctxt;
mod interner;
mod opaque_ty;
mod pattern;
mod predicate;
mod predicate_kind;
mod region_kind;
@ -67,6 +69,7 @@ pub use generic_arg::*;
pub use infer_ctxt::*;
pub use interner::*;
pub use opaque_ty::*;
pub use pattern::*;
pub use predicate::*;
pub use predicate_kind::*;
pub use region_kind::*;

View file

@ -0,0 +1,16 @@
use derive_where::derive_where;
#[cfg(feature = "nightly")]
use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Generic};
use crate::Interner;
#[derive_where(Clone, Copy, Hash, PartialEq, Eq; I: Interner)]
#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)]
#[cfg_attr(
feature = "nightly",
derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext)
)]
pub enum PatternKind<I: Interner> {
Range { start: I::Const, end: I::Const },
}

View file

@ -1,20 +1,21 @@
//! An iterator over the type substructure.
//! WARNING: this does not keep track of the region depth.
use rustc_data_structures::sso::SsoHashSet;
use smallvec::{SmallVec, smallvec};
use tracing::debug;
use crate::ty::{self, GenericArg, GenericArgKind, Ty};
use crate::data_structures::SsoHashSet;
use crate::inherent::*;
use crate::{self as ty, Interner};
// The TypeWalker's stack is hot enough that it's worth going to some effort to
// avoid heap allocations.
type TypeWalkerStack<'tcx> = SmallVec<[GenericArg<'tcx>; 8]>;
type TypeWalkerStack<I> = SmallVec<[<I as Interner>::GenericArg; 8]>;
pub struct TypeWalker<'tcx> {
stack: TypeWalkerStack<'tcx>,
pub struct TypeWalker<I: Interner> {
stack: TypeWalkerStack<I>,
last_subtree: usize,
pub visited: SsoHashSet<GenericArg<'tcx>>,
pub visited: SsoHashSet<I::GenericArg>,
}
/// An iterator for walking the type tree.
@ -25,8 +26,8 @@ pub struct TypeWalker<'tcx> {
/// in this situation walker only visits each type once.
/// It maintains a set of visited types and
/// skips any types that are already there.
impl<'tcx> TypeWalker<'tcx> {
pub fn new(root: GenericArg<'tcx>) -> Self {
impl<I: Interner> TypeWalker<I> {
pub fn new(root: I::GenericArg) -> Self {
Self { stack: smallvec![root], last_subtree: 1, visited: SsoHashSet::new() }
}
@ -47,16 +48,16 @@ impl<'tcx> TypeWalker<'tcx> {
}
}
impl<'tcx> Iterator for TypeWalker<'tcx> {
type Item = GenericArg<'tcx>;
impl<I: Interner> Iterator for TypeWalker<I> {
type Item = I::GenericArg;
fn next(&mut self) -> Option<GenericArg<'tcx>> {
fn next(&mut self) -> Option<I::GenericArg> {
debug!("next(): stack={:?}", self.stack);
loop {
let next = self.stack.pop()?;
self.last_subtree = self.stack.len();
if self.visited.insert(next) {
push_inner(&mut self.stack, next);
push_inner::<I>(&mut self.stack, next);
debug!("next: stack={:?}", self.stack);
return Some(next);
}
@ -64,63 +65,15 @@ impl<'tcx> Iterator for TypeWalker<'tcx> {
}
}
impl<'tcx> GenericArg<'tcx> {
/// Iterator that walks `self` and any types reachable from
/// `self`, in depth-first order. Note that just walks the types
/// that appear in `self`, it does not descend into the fields of
/// structs or variants. For example:
///
/// ```text
/// isize => { isize }
/// Foo<Bar<isize>> => { Foo<Bar<isize>>, Bar<isize>, isize }
/// [isize] => { [isize], isize }
/// ```
pub fn walk(self) -> TypeWalker<'tcx> {
TypeWalker::new(self)
}
}
impl<'tcx> Ty<'tcx> {
/// Iterator that walks `self` and any types reachable from
/// `self`, in depth-first order. Note that just walks the types
/// that appear in `self`, it does not descend into the fields of
/// structs or variants. For example:
///
/// ```text
/// isize => { isize }
/// Foo<Bar<isize>> => { Foo<Bar<isize>>, Bar<isize>, isize }
/// [isize] => { [isize], isize }
/// ```
pub fn walk(self) -> TypeWalker<'tcx> {
TypeWalker::new(self.into())
}
}
impl<'tcx> ty::Const<'tcx> {
/// Iterator that walks `self` and any types reachable from
/// `self`, in depth-first order. Note that just walks the types
/// that appear in `self`, it does not descend into the fields of
/// structs or variants. For example:
///
/// ```text
/// isize => { isize }
/// Foo<Bar<isize>> => { Foo<Bar<isize>>, Bar<isize>, isize }
/// [isize] => { [isize], isize }
/// ```
pub fn walk(self) -> TypeWalker<'tcx> {
TypeWalker::new(self.into())
}
}
/// We push `GenericArg`s on the stack in reverse order so as to
/// maintain a pre-order traversal. As of the time of this
/// writing, the fact that the traversal is pre-order is not
/// known to be significant to any code, but it seems like the
/// natural order one would expect (basically, the order of the
/// types as they are written).
fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) {
match parent.unpack() {
GenericArgKind::Type(parent_ty) => match *parent_ty.kind() {
fn push_inner<I: Interner>(stack: &mut TypeWalkerStack<I>, parent: I::GenericArg) {
match parent.kind() {
ty::GenericArgKind::Type(parent_ty) => match parent_ty.kind() {
ty::Bool
| ty::Char
| ty::Int(_)
@ -136,7 +89,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
| ty::Foreign(..) => {}
ty::Pat(ty, pat) => {
match *pat {
match pat.kind() {
ty::PatternKind::Range { start, end } => {
stack.push(end.into());
stack.push(start.into());
@ -163,22 +116,25 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
}
ty::Dynamic(obj, lt, _) => {
stack.push(lt.into());
stack.extend(obj.iter().rev().flat_map(|predicate| {
let (args, opt_ty) = match predicate.skip_binder() {
ty::ExistentialPredicate::Trait(tr) => (tr.args, None),
ty::ExistentialPredicate::Projection(p) => (p.args, Some(p.term)),
ty::ExistentialPredicate::AutoTrait(_) =>
// Empty iterator
{
(ty::GenericArgs::empty(), None)
}
};
stack.extend(
obj.iter()
.rev()
.filter_map(|predicate| {
let (args, opt_ty) = match predicate.skip_binder() {
ty::ExistentialPredicate::Trait(tr) => (tr.args, None),
ty::ExistentialPredicate::Projection(p) => (p.args, Some(p.term)),
ty::ExistentialPredicate::AutoTrait(_) => {
return None;
}
};
args.iter().rev().chain(opt_ty.map(|term| match term.unpack() {
ty::TermKind::Ty(ty) => ty.into(),
ty::TermKind::Const(ct) => ct.into(),
}))
}));
Some(args.iter().rev().chain(opt_ty.map(|term| match term.kind() {
ty::TermKind::Ty(ty) => ty.into(),
ty::TermKind::Const(ct) => ct.into(),
})))
})
.flatten(),
);
}
ty::Adt(_, args)
| ty::Closure(_, args)
@ -188,7 +144,7 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
| ty::FnDef(_, args) => {
stack.extend(args.iter().rev());
}
ty::Tuple(ts) => stack.extend(ts.iter().rev().map(GenericArg::from)),
ty::Tuple(ts) => stack.extend(ts.iter().rev().map(|ty| ty.into())),
ty::FnPtr(sig_tys, _hdr) => {
stack.extend(
sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()),
@ -198,15 +154,15 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
stack.push(bound_ty.skip_binder().into());
}
},
GenericArgKind::Lifetime(_) => {}
GenericArgKind::Const(parent_ct) => match parent_ct.kind() {
ty::GenericArgKind::Lifetime(_) => {}
ty::GenericArgKind::Const(parent_ct) => match parent_ct.kind() {
ty::ConstKind::Infer(_)
| ty::ConstKind::Param(_)
| ty::ConstKind::Placeholder(_)
| ty::ConstKind::Bound(..)
| ty::ConstKind::Error(_) => {}
ty::ConstKind::Value(cv) => stack.push(cv.ty.into()),
ty::ConstKind::Value(cv) => stack.push(cv.ty().into()),
ty::ConstKind::Expr(expr) => stack.extend(expr.args().iter().rev()),
ty::ConstKind::Unevaluated(ct) => {

View file

@ -10,6 +10,7 @@ doctest = false
# tidy-alphabetical-start
anstyle-svg = "0.1.3"
build_helper = { path = "../../build_helper" }
camino = "1"
colored = "2"
diff = "0.1.10"
getopts = "0.2"

View file

@ -1,18 +1,17 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::ffi::OsString;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::FromStr;
use std::sync::OnceLock;
use std::{fmt, iter};
use build_helper::git::GitConfig;
use camino::{Utf8Path, Utf8PathBuf};
use semver::Version;
use serde::de::{Deserialize, Deserializer, Error as _};
pub use self::Mode::*;
use crate::executor::{ColorConfig, OutputFormat};
use crate::util::{PathBufExt, add_dylib_path};
use crate::util::{Utf8PathBufExt, add_dylib_path};
macro_rules! string_enum {
($(#[$meta:meta])* $vis:vis enum $name:ident { $($variant:ident => $repr:expr,)* }) => {
@ -183,25 +182,25 @@ pub struct Config {
pub fail_fast: bool,
/// The library paths required for running the compiler.
pub compile_lib_path: PathBuf,
pub compile_lib_path: Utf8PathBuf,
/// The library paths required for running compiled programs.
pub run_lib_path: PathBuf,
pub run_lib_path: Utf8PathBuf,
/// The rustc executable.
pub rustc_path: PathBuf,
pub rustc_path: Utf8PathBuf,
/// The cargo executable.
pub cargo_path: Option<PathBuf>,
pub cargo_path: Option<Utf8PathBuf>,
/// Rustc executable used to compile run-make recipes.
pub stage0_rustc_path: Option<PathBuf>,
pub stage0_rustc_path: Option<Utf8PathBuf>,
/// The rustdoc executable.
pub rustdoc_path: Option<PathBuf>,
pub rustdoc_path: Option<Utf8PathBuf>,
/// The coverage-dump executable.
pub coverage_dump_path: Option<PathBuf>,
pub coverage_dump_path: Option<Utf8PathBuf>,
/// The Python executable to use for LLDB and htmldocck.
pub python: String,
@ -213,27 +212,27 @@ pub struct Config {
pub jsondoclint_path: Option<String>,
/// The LLVM `FileCheck` binary path.
pub llvm_filecheck: Option<PathBuf>,
pub llvm_filecheck: Option<Utf8PathBuf>,
/// Path to LLVM's bin directory.
pub llvm_bin_dir: Option<PathBuf>,
pub llvm_bin_dir: Option<Utf8PathBuf>,
/// The path to the Clang executable to run Clang-based tests with. If
/// `None` then these tests will be ignored.
pub run_clang_based_tests_with: Option<String>,
/// The directory containing the sources.
pub src_root: PathBuf,
pub src_root: Utf8PathBuf,
/// The directory containing the test suite sources. Must be a subdirectory of `src_root`.
pub src_test_suite_root: PathBuf,
pub src_test_suite_root: Utf8PathBuf,
/// Root build directory (e.g. `build/`).
pub build_root: PathBuf,
pub build_root: Utf8PathBuf,
/// Test suite specific build directory (e.g. `build/host/test/ui/`).
pub build_test_suite_root: PathBuf,
pub build_test_suite_root: Utf8PathBuf,
/// The directory containing the compiler sysroot
pub sysroot_base: PathBuf,
pub sysroot_base: Utf8PathBuf,
/// The number of the stage under test.
pub stage: u32,
@ -301,7 +300,7 @@ pub struct Config {
pub host: String,
/// Path to / name of the Microsoft Console Debugger (CDB) executable
pub cdb: Option<OsString>,
pub cdb: Option<Utf8PathBuf>,
/// Version of CDB
pub cdb_version: Option<[u16; 4]>,
@ -322,7 +321,7 @@ pub struct Config {
pub system_llvm: bool,
/// Path to the android tools
pub android_cross_path: PathBuf,
pub android_cross_path: Utf8PathBuf,
/// Extra parameter to run adb on arm-linux-androideabi
pub adb_path: String,
@ -346,7 +345,7 @@ pub struct Config {
pub color: ColorConfig,
/// where to find the remote test client process, if we're using it
pub remote_test_client: Option<PathBuf>,
pub remote_test_client: Option<Utf8PathBuf>,
/// mode describing what file the actual ui output will be compared to
pub compare_mode: Option<CompareMode>,
@ -414,7 +413,7 @@ pub struct Config {
/// Path to minicore aux library, used for `no_core` tests that need `core` stubs in
/// cross-compilation scenarios that do not otherwise want/need to `-Zbuild-std`. Used in e.g.
/// ABI tests.
pub minicore_path: PathBuf,
pub minicore_path: Utf8PathBuf,
}
impl Config {
@ -804,8 +803,8 @@ fn serde_parse_u32<'de, D: Deserializer<'de>>(deserializer: D) -> Result<u32, D:
#[derive(Debug, Clone)]
pub struct TestPaths {
pub file: PathBuf, // e.g., compile-test/foo/bar/baz.rs
pub relative_dir: PathBuf, // e.g., foo/bar
pub file: Utf8PathBuf, // e.g., compile-test/foo/bar/baz.rs
pub relative_dir: Utf8PathBuf, // e.g., foo/bar
}
/// Used by `ui` tests to generate things like `foo.stderr` from `foo.rs`.
@ -814,7 +813,7 @@ pub fn expected_output_path(
revision: Option<&str>,
compare_mode: &Option<CompareMode>,
kind: &str,
) -> PathBuf {
) -> Utf8PathBuf {
assert!(UI_EXTENSIONS.contains(&kind));
let mut parts = Vec::new();
@ -865,7 +864,7 @@ pub const UI_COVERAGE_MAP: &str = "cov-map";
/// ```
///
/// This is created early when tests are collected to avoid race conditions.
pub fn output_relative_path(config: &Config, relative_dir: &Path) -> PathBuf {
pub fn output_relative_path(config: &Config, relative_dir: &Utf8Path) -> Utf8PathBuf {
config.build_test_suite_root.join(relative_dir)
}
@ -874,10 +873,10 @@ pub fn output_testname_unique(
config: &Config,
testpaths: &TestPaths,
revision: Option<&str>,
) -> PathBuf {
) -> Utf8PathBuf {
let mode = config.compare_mode.as_ref().map_or("", |m| m.to_str());
let debugger = config.debugger.as_ref().map_or("", |m| m.to_str());
PathBuf::from(&testpaths.file.file_stem().unwrap())
Utf8PathBuf::from(&testpaths.file.file_stem().unwrap())
.with_extra_extension(config.mode.output_dir_disambiguator())
.with_extra_extension(revision.unwrap_or(""))
.with_extra_extension(mode)
@ -887,7 +886,11 @@ pub fn output_testname_unique(
/// Absolute path to the directory where all output for the given
/// test/revision should reside. Example:
/// /path/to/build/host-tuple/test/ui/relative/testname.revision.mode/
pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
pub fn output_base_dir(
config: &Config,
testpaths: &TestPaths,
revision: Option<&str>,
) -> Utf8PathBuf {
output_relative_path(config, &testpaths.relative_dir)
.join(output_testname_unique(config, testpaths, revision))
}
@ -895,12 +898,20 @@ pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option<
/// Absolute path to the base filename used as output for the given
/// test/revision. Example:
/// /path/to/build/host-tuple/test/ui/relative/testname.revision.mode/testname
pub fn output_base_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
pub fn output_base_name(
config: &Config,
testpaths: &TestPaths,
revision: Option<&str>,
) -> Utf8PathBuf {
output_base_dir(config, testpaths, revision).join(testpaths.file.file_stem().unwrap())
}
/// Absolute path to the directory to use for incremental compilation. Example:
/// /path/to/build/host-tuple/test/ui/relative/testname.mode/testname.inc
pub fn incremental_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
pub fn incremental_dir(
config: &Config,
testpaths: &TestPaths,
revision: Option<&str>,
) -> Utf8PathBuf {
output_base_name(config, testpaths, revision).with_extension("inc")
}

View file

@ -1,6 +1,7 @@
use std::collections::VecDeque;
use std::fs::{File, FileType};
use std::path::Path;
use camino::Utf8Path;
#[derive(Debug, PartialEq)]
pub enum DiffLine {
@ -112,8 +113,8 @@ pub(crate) fn write_diff(expected: &str, actual: &str, context_size: usize) -> S
/// Returns whether any data was actually written.
pub(crate) fn write_filtered_diff<Filter>(
diff_filename: &str,
out_dir: &Path,
compare_dir: &Path,
out_dir: &Utf8Path,
compare_dir: &Utf8Path,
verbose: bool,
filter: Filter,
) -> bool
@ -123,19 +124,21 @@ where
use std::io::{Read, Write};
let mut diff_output = File::create(diff_filename).unwrap();
let mut wrote_data = false;
for entry in walkdir::WalkDir::new(out_dir) {
for entry in walkdir::WalkDir::new(out_dir.as_std_path()) {
let entry = entry.expect("failed to read file");
let extension = entry.path().extension().and_then(|p| p.to_str());
if filter(entry.file_type(), extension) {
let expected_path = compare_dir.join(entry.path().strip_prefix(&out_dir).unwrap());
let expected_path = compare_dir
.as_std_path()
.join(entry.path().strip_prefix(&out_dir.as_std_path()).unwrap());
let expected = if let Ok(s) = std::fs::read(&expected_path) { s } else { continue };
let actual_path = entry.path();
let actual = std::fs::read(&actual_path).unwrap();
let diff = unified_diff::diff(
&expected,
&expected_path.to_string_lossy(),
&expected_path.to_str().unwrap(),
&actual,
&actual_path.to_string_lossy(),
&actual_path.to_str().unwrap(),
3,
);
wrote_data |= !diff.is_empty();

View file

@ -1,9 +1,9 @@
use std::env;
use std::ffi::OsString;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::Arc;
use camino::{Utf8Path, Utf8PathBuf};
use crate::common::{Config, Debugger};
pub(crate) fn configure_cdb(config: &Config) -> Option<Arc<Config>> {
@ -78,12 +78,15 @@ fn is_pc_windows_msvc_target(target: &str) -> bool {
target.ends_with("-pc-windows-msvc")
}
fn find_cdb(target: &str) -> Option<OsString> {
fn find_cdb(target: &str) -> Option<Utf8PathBuf> {
if !(cfg!(windows) && is_pc_windows_msvc_target(target)) {
return None;
}
let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?;
let pf86 = Utf8PathBuf::from_path_buf(
env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?.into(),
)
.unwrap();
let cdb_arch = if cfg!(target_arch = "x86") {
"x86"
} else if cfg!(target_arch = "x86_64") {
@ -96,8 +99,7 @@ fn find_cdb(target: &str) -> Option<OsString> {
return None; // No compatible CDB.exe in the Windows 10 SDK
};
let mut path = PathBuf::new();
path.push(pf86);
let mut path = pf86;
path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too?
path.push(cdb_arch);
path.push(r"cdb.exe");
@ -106,15 +108,15 @@ fn find_cdb(target: &str) -> Option<OsString> {
return None;
}
Some(path.into_os_string())
Some(path)
}
/// Returns Path to CDB
pub(crate) fn analyze_cdb(
cdb: Option<String>,
target: &str,
) -> (Option<OsString>, Option<[u16; 4]>) {
let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target));
) -> (Option<Utf8PathBuf>, Option<[u16; 4]>) {
let cdb = cdb.map(Utf8PathBuf::from).or_else(|| find_cdb(target));
let mut version = None;
if let Some(cdb) = cdb.as_ref() {
@ -143,7 +145,7 @@ pub(crate) fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> {
pub(crate) fn analyze_gdb(
gdb: Option<String>,
target: &str,
android_cross_path: &Path,
android_cross_path: &Utf8Path,
) -> (Option<String>, Option<u32>) {
#[cfg(not(windows))]
const GDB_FALLBACK: &str = "gdb";
@ -152,10 +154,7 @@ pub(crate) fn analyze_gdb(
let fallback_gdb = || {
if is_android_gdb_target(target) {
let mut gdb_path = match android_cross_path.to_str() {
Some(x) => x.to_owned(),
None => panic!("cannot find android cross path"),
};
let mut gdb_path = android_cross_path.to_string();
gdb_path.push_str("/bin/gdb");
gdb_path
} else {

View file

@ -2,9 +2,9 @@ use std::fmt;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
use std::path::Path;
use std::sync::OnceLock;
use camino::Utf8Path;
use regex::Regex;
use tracing::*;
@ -102,8 +102,8 @@ impl Error {
///
/// If revision is not None, then we look
/// for `//[X]~` instead, where `X` is the current revision.
pub fn load_errors(testfile: &Path, revision: Option<&str>) -> Vec<Error> {
let rdr = BufReader::new(File::open(testfile).unwrap());
pub fn load_errors(testfile: &Utf8Path, revision: Option<&str>) -> Vec<Error> {
let rdr = BufReader::new(File::open(testfile.as_std_path()).unwrap());
// `last_nonfollow_error` tracks the most recently seen
// line with an error template that did not use the

View file

@ -3,9 +3,9 @@ use std::env;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf};
use semver::Version;
use tracing::*;
@ -45,12 +45,12 @@ pub struct EarlyProps {
}
impl EarlyProps {
pub fn from_file(config: &Config, testfile: &Path) -> Self {
let file = File::open(testfile).expect("open test file to parse earlyprops");
pub fn from_file(config: &Config, testfile: &Utf8Path) -> Self {
let file = File::open(testfile.as_std_path()).expect("open test file to parse earlyprops");
Self::from_reader(config, testfile, file)
}
pub fn from_reader<R: Read>(config: &Config, testfile: &Path, rdr: R) -> Self {
pub fn from_reader<R: Read>(config: &Config, testfile: &Utf8Path, rdr: R) -> Self {
let mut props = EarlyProps::default();
let mut poisoned = false;
iter_header(
@ -66,7 +66,7 @@ impl EarlyProps {
);
if poisoned {
eprintln!("errors encountered during EarlyProps parsing: {}", testfile.display());
eprintln!("errors encountered during EarlyProps parsing: {}", testfile);
panic!("errors encountered during EarlyProps parsing");
}
@ -88,7 +88,7 @@ pub struct TestProps {
pub doc_flags: Vec<String>,
// If present, the name of a file that this test should match when
// pretty-printed
pub pp_exact: Option<PathBuf>,
pub pp_exact: Option<Utf8PathBuf>,
/// Auxiliary crates that should be built and made available to this test.
pub(crate) aux: AuxProps,
// Environment settings to use for compiling
@ -134,7 +134,7 @@ pub struct TestProps {
// not set by end-users; rather it is set by the incremental
// testing harness and used when generating compilation
// arguments. (In particular, it propagates to the aux-builds.)
pub incremental_dir: Option<PathBuf>,
pub incremental_dir: Option<Utf8PathBuf>,
// If `true`, this test will use incremental compilation.
//
// This can be set manually with the `incremental` header, or implicitly
@ -305,7 +305,12 @@ impl TestProps {
}
}
pub fn from_aux_file(&self, testfile: &Path, revision: Option<&str>, config: &Config) -> Self {
pub fn from_aux_file(
&self,
testfile: &Utf8Path,
revision: Option<&str>,
config: &Config,
) -> Self {
let mut props = TestProps::new();
// copy over select properties to the aux build:
@ -316,10 +321,10 @@ impl TestProps {
props
}
pub fn from_file(testfile: &Path, revision: Option<&str>, config: &Config) -> Self {
pub fn from_file(testfile: &Utf8Path, revision: Option<&str>, config: &Config) -> Self {
let mut props = TestProps::new();
props.load_from(testfile, revision, config);
props.exec_env.push(("RUSTC".to_string(), config.rustc_path.display().to_string()));
props.exec_env.push(("RUSTC".to_string(), config.rustc_path.to_string()));
match (props.pass_mode, props.fail_mode) {
(None, None) if config.mode == Mode::Ui => props.fail_mode = Some(FailMode::Check),
@ -334,10 +339,10 @@ impl TestProps {
/// tied to a particular revision `foo` (indicated by writing
/// `//@[foo]`), then the property is ignored unless `test_revision` is
/// `Some("foo")`.
fn load_from(&mut self, testfile: &Path, test_revision: Option<&str>, config: &Config) {
fn load_from(&mut self, testfile: &Utf8Path, test_revision: Option<&str>, config: &Config) {
let mut has_edition = false;
if !testfile.is_dir() {
let file = File::open(testfile).unwrap();
let file = File::open(testfile.as_std_path()).unwrap();
let mut poisoned = false;
@ -594,7 +599,7 @@ impl TestProps {
);
if poisoned {
eprintln!("errors encountered during TestProps parsing: {}", testfile.display());
eprintln!("errors encountered during TestProps parsing: {}", testfile);
panic!("errors encountered during TestProps parsing");
}
}
@ -865,7 +870,7 @@ fn iter_header(
mode: Mode,
_suite: &str,
poisoned: &mut bool,
testfile: &Path,
testfile: &Utf8Path,
rdr: impl Read,
it: &mut dyn FnMut(DirectiveLine<'_>),
) {
@ -917,9 +922,7 @@ fn iter_header(
eprintln!(
"error: detected unknown compiletest test directive `{}` in {}:{}",
directive_line.raw_directive,
testfile.display(),
line_number,
directive_line.raw_directive, testfile, line_number,
);
return;
@ -931,10 +934,7 @@ fn iter_header(
eprintln!(
"error: detected trailing compiletest test directive `{}` in {}:{}\n \
help: put the trailing directive in it's own line: `//@ {}`",
trailing_directive,
testfile.display(),
line_number,
trailing_directive,
trailing_directive, testfile, line_number, trailing_directive,
);
return;
@ -946,7 +946,12 @@ fn iter_header(
}
impl Config {
fn parse_and_update_revisions(&self, testfile: &Path, line: &str, existing: &mut Vec<String>) {
fn parse_and_update_revisions(
&self,
testfile: &Utf8Path,
line: &str,
existing: &mut Vec<String>,
) {
const FORBIDDEN_REVISION_NAMES: [&str; 2] = [
// `//@ revisions: true false` Implying `--cfg=true` and `--cfg=false` makes it very
// weird for the test, since if the test writer wants a cfg of the same revision name
@ -959,26 +964,19 @@ impl Config {
if let Some(raw) = self.parse_name_value_directive(line, "revisions") {
if self.mode == Mode::RunMake {
panic!("`run-make` tests do not support revisions: {}", testfile.display());
panic!("`run-make` tests do not support revisions: {}", testfile);
}
let mut duplicates: HashSet<_> = existing.iter().cloned().collect();
for revision in raw.split_whitespace() {
if !duplicates.insert(revision.to_string()) {
panic!(
"duplicate revision: `{}` in line `{}`: {}",
revision,
raw,
testfile.display()
);
panic!("duplicate revision: `{}` in line `{}`: {}", revision, raw, testfile);
}
if FORBIDDEN_REVISION_NAMES.contains(&revision) {
panic!(
"revision name `{revision}` is not permitted: `{}` in line `{}`: {}",
revision,
raw,
testfile.display()
revision, raw, testfile
);
}
@ -989,8 +987,7 @@ impl Config {
"revision name `{revision}` is not permitted in a test suite that uses \
`FileCheck` annotations as it is confusing when used as custom `FileCheck` \
prefix: `{revision}` in line `{}`: {}",
raw,
testfile.display()
raw, testfile
);
}
@ -1010,11 +1007,11 @@ impl Config {
(name.to_owned(), value.to_owned())
}
fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option<PathBuf> {
fn parse_pp_exact(&self, line: &str, testfile: &Utf8Path) -> Option<Utf8PathBuf> {
if let Some(s) = self.parse_name_value_directive(line, "pp-exact") {
Some(PathBuf::from(&s))
Some(Utf8PathBuf::from(&s))
} else if self.parse_name_directive(line, "pp-exact") {
testfile.file_name().map(PathBuf::from)
testfile.file_name().map(Utf8PathBuf::from)
} else {
None
}
@ -1120,20 +1117,19 @@ fn expand_variables(mut value: String, config: &Config) -> String {
if value.contains(CWD) {
let cwd = env::current_dir().unwrap();
value = value.replace(CWD, &cwd.to_string_lossy());
value = value.replace(CWD, &cwd.to_str().unwrap());
}
if value.contains(SRC_BASE) {
value = value.replace(SRC_BASE, &config.src_test_suite_root.to_str().unwrap());
value = value.replace(SRC_BASE, &config.src_test_suite_root.as_str());
}
if value.contains(TEST_SUITE_BUILD_BASE) {
value =
value.replace(TEST_SUITE_BUILD_BASE, &config.build_test_suite_root.to_str().unwrap());
value = value.replace(TEST_SUITE_BUILD_BASE, &config.build_test_suite_root.as_str());
}
if value.contains(SYSROOT_BASE) {
value = value.replace(SYSROOT_BASE, &config.sysroot_base.to_str().unwrap());
value = value.replace(SYSROOT_BASE, &config.sysroot_base.as_str());
}
if value.contains(TARGET_LINKER) {
@ -1146,9 +1142,9 @@ fn expand_variables(mut value: String, config: &Config) -> String {
if value.contains(RUST_SRC_BASE) {
let src_base = config.sysroot_base.join("lib/rustlib/src/rust");
src_base.try_exists().expect(&*format!("{} should exists", src_base.display()));
let src_base = src_base.read_link().unwrap_or(src_base);
value = value.replace(RUST_SRC_BASE, &src_base.to_string_lossy());
src_base.try_exists().expect(&*format!("{} should exists", src_base));
let src_base = src_base.read_link_utf8().unwrap_or(src_base);
value = value.replace(RUST_SRC_BASE, &src_base.as_str());
}
value
@ -1251,14 +1247,14 @@ pub fn llvm_has_libzstd(config: &Config) -> bool {
// contains a path to that static lib, and that it exists.
//
// See compiler/rustc_llvm/build.rs for more details and similar expectations.
fn is_zstd_in_config(llvm_bin_dir: &Path) -> Option<()> {
fn is_zstd_in_config(llvm_bin_dir: &Utf8Path) -> Option<()> {
let llvm_config_path = llvm_bin_dir.join("llvm-config");
let output = Command::new(llvm_config_path).arg("--system-libs").output().ok()?;
assert!(output.status.success(), "running llvm-config --system-libs failed");
let libs = String::from_utf8(output.stdout).ok()?;
for lib in libs.split_whitespace() {
if lib.ends_with("libzstd.a") && Path::new(lib).exists() {
if lib.ends_with("libzstd.a") && Utf8Path::new(lib).exists() {
return Some(());
}
}
@ -1276,7 +1272,7 @@ pub fn llvm_has_libzstd(config: &Config) -> bool {
// `lld` supports it. If not, an error will be emitted: "LLVM was not built with
// LLVM_ENABLE_ZSTD or did not find zstd at build time".
#[cfg(unix)]
fn is_lld_built_with_zstd(llvm_bin_dir: &Path) -> Option<()> {
fn is_lld_built_with_zstd(llvm_bin_dir: &Utf8Path) -> Option<()> {
let lld_path = llvm_bin_dir.join("lld");
if lld_path.exists() {
// We can't call `lld` as-is, it expects to be invoked by a compiler driver using a
@ -1312,7 +1308,7 @@ pub fn llvm_has_libzstd(config: &Config) -> bool {
}
#[cfg(not(unix))]
fn is_lld_built_with_zstd(_llvm_bin_dir: &Path) -> Option<()> {
fn is_lld_built_with_zstd(_llvm_bin_dir: &Utf8Path) -> Option<()> {
None
}
@ -1379,7 +1375,7 @@ pub(crate) fn make_test_description<R: Read>(
config: &Config,
cache: &HeadersCache,
name: String,
path: &Path,
path: &Utf8Path,
src: R,
test_revision: Option<&str>,
poisoned: &mut bool,
@ -1410,7 +1406,7 @@ pub(crate) fn make_test_description<R: Read>(
ignore_message = Some(reason.into());
}
IgnoreDecision::Error { message } => {
eprintln!("error: {}:{line_number}: {message}", path.display());
eprintln!("error: {}:{line_number}: {message}", path);
*poisoned = true;
return;
}
@ -1440,7 +1436,7 @@ pub(crate) fn make_test_description<R: Read>(
);
if local_poisoned {
eprintln!("errors encountered when trying to make test description: {}", path.display());
eprintln!("errors encountered when trying to make test description: {}", path);
panic!("errors encountered when trying to make test description");
}
@ -1549,7 +1545,7 @@ fn ignore_lldb(config: &Config, line: &str) -> IgnoreDecision {
IgnoreDecision::Continue
}
fn ignore_llvm(config: &Config, path: &Path, line: &str) -> IgnoreDecision {
fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision {
if let Some(needed_components) =
config.parse_name_value_directive(line, "needs-llvm-components")
{
@ -1561,8 +1557,7 @@ fn ignore_llvm(config: &Config, path: &Path, line: &str) -> IgnoreDecision {
if env::var_os("COMPILETEST_REQUIRE_ALL_LLVM_COMPONENTS").is_some() {
panic!(
"missing LLVM component {}, and COMPILETEST_REQUIRE_ALL_LLVM_COMPONENTS is set: {}",
missing_component,
path.display()
missing_component, path
);
}
return IgnoreDecision::Ignore {

View file

@ -1,6 +1,6 @@
use std::io::Read;
use std::path::Path;
use camino::Utf8Path;
use semver::Version;
use super::{
@ -13,7 +13,7 @@ use crate::executor::{CollectedTestDesc, ShouldPanic};
fn make_test_description<R: Read>(
config: &Config,
name: String,
path: &Path,
path: &Utf8Path,
src: R,
revision: Option<&str>,
) -> CollectedTestDesc {
@ -230,12 +230,12 @@ fn cfg() -> ConfigBuilder {
fn parse_rs(config: &Config, contents: &str) -> EarlyProps {
let bytes = contents.as_bytes();
EarlyProps::from_reader(config, Path::new("a.rs"), bytes)
EarlyProps::from_reader(config, Utf8Path::new("a.rs"), bytes)
}
fn check_ignore(config: &Config, contents: &str) -> bool {
let tn = String::new();
let p = Path::new("a.rs");
let p = Utf8Path::new("a.rs");
let d = make_test_description(&config, tn, p, std::io::Cursor::new(contents), None);
d.ignore
}
@ -244,7 +244,7 @@ fn check_ignore(config: &Config, contents: &str) -> bool {
fn should_fail() {
let config: Config = cfg().build();
let tn = String::new();
let p = Path::new("a.rs");
let p = Utf8Path::new("a.rs");
let d = make_test_description(&config, tn.clone(), p, std::io::Cursor::new(""), None);
assert_eq!(d.should_panic, ShouldPanic::No);
@ -784,7 +784,7 @@ fn threads_support() {
}
}
fn run_path(poisoned: &mut bool, path: &Path, buf: &[u8]) {
fn run_path(poisoned: &mut bool, path: &Utf8Path, buf: &[u8]) {
let rdr = std::io::Cursor::new(&buf);
iter_header(Mode::Ui, "ui", poisoned, path, rdr, &mut |_| {});
}
@ -794,7 +794,7 @@ fn test_unknown_directive_check() {
let mut poisoned = false;
run_path(
&mut poisoned,
Path::new("a.rs"),
Utf8Path::new("a.rs"),
include_bytes!("./test-auxillary/unknown_directive.rs"),
);
assert!(poisoned);
@ -805,7 +805,7 @@ fn test_known_directive_check_no_error() {
let mut poisoned = false;
run_path(
&mut poisoned,
Path::new("a.rs"),
Utf8Path::new("a.rs"),
include_bytes!("./test-auxillary/known_directive.rs"),
);
assert!(!poisoned);
@ -816,7 +816,7 @@ fn test_error_annotation_no_error() {
let mut poisoned = false;
run_path(
&mut poisoned,
Path::new("a.rs"),
Utf8Path::new("a.rs"),
include_bytes!("./test-auxillary/error_annotation.rs"),
);
assert!(!poisoned);
@ -827,7 +827,7 @@ fn test_non_rs_unknown_directive_not_checked() {
let mut poisoned = false;
run_path(
&mut poisoned,
Path::new("a.Makefile"),
Utf8Path::new("a.Makefile"),
include_bytes!("./test-auxillary/not_rs.Makefile"),
);
assert!(!poisoned);
@ -836,21 +836,21 @@ fn test_non_rs_unknown_directive_not_checked() {
#[test]
fn test_trailing_directive() {
let mut poisoned = false;
run_path(&mut poisoned, Path::new("a.rs"), b"//@ only-x86 only-arm");
run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ only-x86 only-arm");
assert!(poisoned);
}
#[test]
fn test_trailing_directive_with_comment() {
let mut poisoned = false;
run_path(&mut poisoned, Path::new("a.rs"), b"//@ only-x86 only-arm with comment");
run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ only-x86 only-arm with comment");
assert!(poisoned);
}
#[test]
fn test_not_trailing_directive() {
let mut poisoned = false;
run_path(&mut poisoned, Path::new("a.rs"), b"//@ revisions: incremental");
run_path(&mut poisoned, Utf8Path::new("a.rs"), b"//@ revisions: incremental");
assert!(!poisoned);
}

View file

@ -22,16 +22,15 @@ pub mod util;
use core::panic;
use std::collections::HashSet;
use std::ffi::OsString;
use std::fmt::Write;
use std::io::{self, ErrorKind};
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::{Arc, OnceLock};
use std::time::SystemTime;
use std::{env, fs, vec};
use build_helper::git::{get_git_modified_files, get_git_untracked_files};
use camino::{Utf8Path, Utf8PathBuf};
use getopts::Options;
use tracing::*;
use walkdir::WalkDir;
@ -230,15 +229,19 @@ pub fn parse_config(args: Vec<String>) -> Config {
panic!()
}
fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf {
match m.opt_str(nm) {
Some(s) => PathBuf::from(&s),
None => panic!("no option (=path) found for {}", nm),
fn make_absolute(path: Utf8PathBuf) -> Utf8PathBuf {
if path.is_relative() {
Utf8PathBuf::try_from(env::current_dir().unwrap()).unwrap().join(path)
} else {
path
}
}
fn make_absolute(path: PathBuf) -> PathBuf {
if path.is_relative() { env::current_dir().unwrap().join(path) } else { path }
fn opt_path(m: &getopts::Matches, nm: &str) -> Utf8PathBuf {
match m.opt_str(nm) {
Some(s) => Utf8PathBuf::from(&s),
None => panic!("no option (=path) found for {}", nm),
}
}
let target = opt_str2(matches.opt_str("target"));
@ -279,12 +282,12 @@ pub fn parse_config(args: Vec<String>) -> Config {
.free
.iter()
.map(|f| {
let path = Path::new(f);
let path = Utf8Path::new(f);
let mut iter = path.iter().skip(1);
// We skip the test folder and check if the user passed `rmake.rs`.
if iter.next().is_some_and(|s| s == "rmake.rs") && iter.next().is_none() {
path.parent().unwrap().to_str().unwrap().to_string()
path.parent().unwrap().to_string()
} else {
f.to_string()
}
@ -316,8 +319,8 @@ pub fn parse_config(args: Vec<String>) -> Config {
assert!(
src_test_suite_root.starts_with(&src_root),
"`src-root` must be a parent of `src-test-suite-root`: `src-root`=`{}`, `src-test-suite-root` = `{}`",
src_root.display(),
src_test_suite_root.display()
src_root,
src_test_suite_root
);
let build_root = opt_path(matches, "build-root");
@ -332,16 +335,16 @@ pub fn parse_config(args: Vec<String>) -> Config {
compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
rustc_path: opt_path(matches, "rustc-path"),
cargo_path: matches.opt_str("cargo-path").map(PathBuf::from),
stage0_rustc_path: matches.opt_str("stage0-rustc-path").map(PathBuf::from),
rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from),
coverage_dump_path: matches.opt_str("coverage-dump-path").map(PathBuf::from),
cargo_path: matches.opt_str("cargo-path").map(Utf8PathBuf::from),
stage0_rustc_path: matches.opt_str("stage0-rustc-path").map(Utf8PathBuf::from),
rustdoc_path: matches.opt_str("rustdoc-path").map(Utf8PathBuf::from),
coverage_dump_path: matches.opt_str("coverage-dump-path").map(Utf8PathBuf::from),
python: matches.opt_str("python").unwrap(),
jsondocck_path: matches.opt_str("jsondocck-path"),
jsondoclint_path: matches.opt_str("jsondoclint-path"),
run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"),
llvm_filecheck: matches.opt_str("llvm-filecheck").map(PathBuf::from),
llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(PathBuf::from),
llvm_filecheck: matches.opt_str("llvm-filecheck").map(Utf8PathBuf::from),
llvm_bin_dir: matches.opt_str("llvm-bin-dir").map(Utf8PathBuf::from),
src_root,
src_test_suite_root,
@ -407,7 +410,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
},
only_modified: matches.opt_present("only-modified"),
color,
remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
remote_test_client: matches.opt_str("remote-test-client").map(Utf8PathBuf::from),
compare_mode,
rustfix_coverage: matches.opt_present("rustfix-coverage"),
has_html_tidy,
@ -450,19 +453,19 @@ pub fn parse_config(args: Vec<String>) -> Config {
pub fn log_config(config: &Config) {
let c = config;
logv(c, "configuration:".to_string());
logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path));
logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
logv(c, format!("compile_lib_path: {}", config.compile_lib_path));
logv(c, format!("run_lib_path: {}", config.run_lib_path));
logv(c, format!("rustc_path: {}", config.rustc_path));
logv(c, format!("cargo_path: {:?}", config.cargo_path));
logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
logv(c, format!("src_root: {}", config.src_root.display()));
logv(c, format!("src_test_suite_root: {}", config.src_test_suite_root.display()));
logv(c, format!("src_root: {}", config.src_root));
logv(c, format!("src_test_suite_root: {}", config.src_test_suite_root));
logv(c, format!("build_root: {}", config.build_root.display()));
logv(c, format!("build_test_suite_root: {}", config.build_test_suite_root.display()));
logv(c, format!("build_root: {}", config.build_root));
logv(c, format!("build_test_suite_root: {}", config.build_test_suite_root));
logv(c, format!("sysroot_base: {}", config.sysroot_base.display()));
logv(c, format!("sysroot_base: {}", config.sysroot_base));
logv(c, format!("stage: {}", config.stage));
logv(c, format!("stage_id: {}", config.stage_id));
@ -480,16 +483,16 @@ pub fn log_config(config: &Config) {
logv(c, format!("target-rustcflags: {:?}", config.target_rustcflags));
logv(c, format!("target: {}", config.target));
logv(c, format!("host: {}", config.host));
logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display()));
logv(c, format!("adb_path: {:?}", config.adb_path));
logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
logv(c, format!("android-cross-path: {}", config.android_cross_path));
logv(c, format!("adb_path: {}", config.adb_path));
logv(c, format!("adb_test_dir: {}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}", config.adb_device_status));
logv(c, format!("ar: {}", config.ar));
logv(c, format!("target-linker: {:?}", config.target_linker));
logv(c, format!("host-linker: {:?}", config.host_linker));
logv(c, format!("verbose: {}", config.verbose));
logv(c, format!("format: {:?}", config.format));
logv(c, format!("minicore_path: {:?}", config.minicore_path.display()));
logv(c, format!("minicore_path: {}", config.minicore_path));
logv(c, "\n".to_string());
}
@ -517,7 +520,7 @@ pub fn run_tests(config: Arc<Config>) {
coverage_file_path.push("rustfix_missing_coverage.txt");
if coverage_file_path.exists() {
if let Err(e) = fs::remove_file(&coverage_file_path) {
panic!("Could not delete {} due to {}", coverage_file_path.display(), e)
panic!("Could not delete {} due to {}", coverage_file_path, e)
}
}
}
@ -619,13 +622,13 @@ struct TestCollectorCx {
config: Arc<Config>,
cache: HeadersCache,
common_inputs_stamp: Stamp,
modified_tests: Vec<PathBuf>,
modified_tests: Vec<Utf8PathBuf>,
}
/// Mutable state used during test collection.
struct TestCollector {
tests: Vec<CollectedTest>,
found_path_stems: HashSet<PathBuf>,
found_path_stems: HashSet<Utf8PathBuf>,
poisoned: bool,
}
@ -635,14 +638,13 @@ struct TestCollector {
/// regardless of whether any filters/tests were specified on the command-line,
/// because filtering is handled later by libtest.
pub(crate) fn collect_and_make_tests(config: Arc<Config>) -> Vec<CollectedTest> {
debug!("making tests from {}", config.src_test_suite_root.display());
debug!("making tests from {}", config.src_test_suite_root);
let common_inputs_stamp = common_inputs_stamp(&config);
let modified_tests =
modified_tests(&config, &config.src_test_suite_root).unwrap_or_else(|err| {
panic!(
"modified_tests got error from dir: {}, error: {}",
config.src_test_suite_root.display(),
err
config.src_test_suite_root, err
)
});
let cache = HeadersCache::load(&config);
@ -651,12 +653,9 @@ pub(crate) fn collect_and_make_tests(config: Arc<Config>) -> Vec<CollectedTest>
let mut collector =
TestCollector { tests: vec![], found_path_stems: HashSet::new(), poisoned: false };
collect_tests_from_dir(&cx, &mut collector, &cx.config.src_test_suite_root, Path::new(""))
collect_tests_from_dir(&cx, &mut collector, &cx.config.src_test_suite_root, Utf8Path::new(""))
.unwrap_or_else(|reason| {
panic!(
"Could not read tests from {}: {reason}",
cx.config.src_test_suite_root.display()
)
panic!("Could not read tests from {}: {reason}", cx.config.src_test_suite_root)
});
let TestCollector { tests, found_path_stems, poisoned } = collector;
@ -725,24 +724,29 @@ fn common_inputs_stamp(config: &Config) -> Stamp {
/// the `--only-modified` flag is in use.
///
/// (Might be inaccurate in some cases.)
fn modified_tests(config: &Config, dir: &Path) -> Result<Vec<PathBuf>, String> {
fn modified_tests(config: &Config, dir: &Utf8Path) -> Result<Vec<Utf8PathBuf>, String> {
// If `--only-modified` wasn't passed, the list of modified tests won't be
// used for anything, so avoid some work and just return an empty list.
if !config.only_modified {
return Ok(vec![]);
}
let files =
get_git_modified_files(&config.git_config(), Some(dir), &vec!["rs", "stderr", "fixed"])?;
let files = get_git_modified_files(
&config.git_config(),
Some(dir.as_std_path()),
&vec!["rs", "stderr", "fixed"],
)?;
// Add new test cases to the list, it will be convenient in daily development.
let untracked_files = get_git_untracked_files(&config.git_config(), None)?.unwrap_or(vec![]);
let all_paths = [&files[..], &untracked_files[..]].concat();
let full_paths = {
let mut full_paths: Vec<PathBuf> = all_paths
let mut full_paths: Vec<Utf8PathBuf> = all_paths
.into_iter()
.map(|f| PathBuf::from(f).with_extension("").with_extension("rs"))
.filter_map(|f| if Path::new(&f).exists() { f.canonicalize().ok() } else { None })
.map(|f| Utf8PathBuf::from(f).with_extension("").with_extension("rs"))
.filter_map(
|f| if Utf8Path::new(&f).exists() { f.canonicalize_utf8().ok() } else { None },
)
.collect();
full_paths.dedup();
full_paths.sort_unstable();
@ -756,8 +760,8 @@ fn modified_tests(config: &Config, dir: &Path) -> Result<Vec<PathBuf>, String> {
fn collect_tests_from_dir(
cx: &TestCollectorCx,
collector: &mut TestCollector,
dir: &Path,
relative_dir_path: &Path,
dir: &Utf8Path,
relative_dir_path: &Utf8Path,
) -> io::Result<()> {
// Ignore directories that contain a file named `compiletest-ignore-dir`.
if dir.join("compiletest-ignore-dir").exists() {
@ -790,16 +794,16 @@ fn collect_tests_from_dir(
// subdirectories we find, except for `auxiliary` directories.
// FIXME: this walks full tests tree, even if we have something to ignore
// use walkdir/ignore like in tidy?
for file in fs::read_dir(dir)? {
for file in fs::read_dir(dir.as_std_path())? {
let file = file?;
let file_path = file.path();
let file_name = file.file_name();
let file_path = Utf8PathBuf::try_from(file.path()).unwrap();
let file_name = file_path.file_name().unwrap();
if is_test(&file_name)
if is_test(file_name)
&& (!cx.config.only_modified || cx.modified_tests.contains(&file_path))
{
// We found a test file, so create the corresponding libtest structures.
debug!("found test file: {:?}", file_path.display());
debug!(%file_path, "found test file");
// Record the stem of the test file, to check for overlaps later.
let rel_test_path = relative_dir_path.join(file_path.file_stem().unwrap());
@ -810,22 +814,20 @@ fn collect_tests_from_dir(
make_test(cx, collector, &paths);
} else if file_path.is_dir() {
// Recurse to find more tests in a subdirectory.
let relative_file_path = relative_dir_path.join(file.file_name());
if &file_name != "auxiliary" {
debug!("found directory: {:?}", file_path.display());
let relative_file_path = relative_dir_path.join(file_name);
if file_name != "auxiliary" {
debug!(%file_path, "found directory");
collect_tests_from_dir(cx, collector, &file_path, &relative_file_path)?;
}
} else {
debug!("found other file/directory: {:?}", file_path.display());
debug!(%file_path, "found other file/directory");
}
}
Ok(())
}
/// Returns true if `file_name` looks like a proper test file name.
pub fn is_test(file_name: &OsString) -> bool {
let file_name = file_name.to_str().unwrap();
pub fn is_test(file_name: &str) -> bool {
if !file_name.ends_with(".rs") {
return false;
}
@ -844,7 +846,7 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te
let test_path = if cx.config.mode == Mode::RunMake {
testpaths.file.join("rmake.rs")
} else {
PathBuf::from(&testpaths.file)
testpaths.file.clone()
};
// Scan the test file to discover its revisions, if any.
@ -899,7 +901,7 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te
/// The path of the `stamp` file that gets created or updated whenever a
/// particular test completes successfully.
fn stamp_file_path(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
fn stamp_file_path(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> Utf8PathBuf {
output_base_dir(config, testpaths, revision).join("stamp")
}
@ -912,7 +914,7 @@ fn files_related_to_test(
testpaths: &TestPaths,
props: &EarlyProps,
revision: Option<&str>,
) -> Vec<PathBuf> {
) -> Vec<Utf8PathBuf> {
let mut related = vec![];
if testpaths.file.is_dir() {
@ -920,7 +922,7 @@ fn files_related_to_test(
for entry in WalkDir::new(&testpaths.file) {
let path = entry.unwrap().into_path();
if path.is_file() {
related.push(path);
related.push(Utf8PathBuf::try_from(path).unwrap());
}
}
} else {
@ -991,7 +993,7 @@ struct Stamp {
impl Stamp {
/// Creates a timestamp holding the last-modified time of the specified file.
fn from_path(path: &Path) -> Self {
fn from_path(path: &Utf8Path) -> Self {
let mut stamp = Stamp { time: SystemTime::UNIX_EPOCH };
stamp.add_path(path);
stamp
@ -999,8 +1001,8 @@ impl Stamp {
/// Updates this timestamp to the last-modified time of the specified file,
/// if it is later than the currently-stored timestamp.
fn add_path(&mut self, path: &Path) {
let modified = fs::metadata(path)
fn add_path(&mut self, path: &Utf8Path) {
let modified = fs::metadata(path.as_std_path())
.and_then(|metadata| metadata.modified())
.unwrap_or(SystemTime::UNIX_EPOCH);
self.time = self.time.max(modified);
@ -1009,7 +1011,8 @@ impl Stamp {
/// Updates this timestamp to the most recent last-modified time of all files
/// recursively contained in the given directory, if it is later than the
/// currently-stored timestamp.
fn add_dir(&mut self, path: &Path) {
fn add_dir(&mut self, path: &Utf8Path) {
let path = path.as_std_path();
for entry in WalkDir::new(path) {
let entry = entry.unwrap();
if entry.file_type().is_file() {
@ -1042,7 +1045,7 @@ fn make_test_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>
config.mode,
debugger,
mode_suffix,
path.display(),
path,
revision.map_or("".to_string(), |rev| format!("#{}", rev))
)
}
@ -1064,7 +1067,7 @@ fn make_test_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>
/// To avoid problems, we forbid test names from overlapping in this way.
///
/// See <https://github.com/rust-lang/rust/pull/109509> for more context.
fn check_for_overlapping_test_paths(found_path_stems: &HashSet<PathBuf>) {
fn check_for_overlapping_test_paths(found_path_stems: &HashSet<Utf8PathBuf>) {
let mut collisions = Vec::new();
for path in found_path_stems {
for ancestor in path.ancestors().skip(1) {
@ -1077,7 +1080,7 @@ fn check_for_overlapping_test_paths(found_path_stems: &HashSet<PathBuf>) {
collisions.sort();
let collisions: String = collisions
.into_iter()
.map(|(path, check_parent)| format!("test {path:?} clashes with {check_parent:?}\n"))
.map(|(path, check_parent)| format!("test {path} clashes with {check_parent}\n"))
.collect();
panic!(
"{collisions}\n\

View file

@ -1,15 +1,15 @@
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::ffi::{OsStr, OsString};
use std::ffi::OsString;
use std::fs::{self, File, create_dir_all};
use std::hash::{DefaultHasher, Hash, Hasher};
use std::io::prelude::*;
use std::io::{self, BufReader};
use std::path::{Path, PathBuf};
use std::process::{Child, Command, ExitStatus, Output, Stdio};
use std::sync::Arc;
use std::{env, iter, str};
use camino::{Utf8Path, Utf8PathBuf};
use colored::Colorize;
use regex::{Captures, Regex};
use tracing::*;
@ -25,7 +25,7 @@ use crate::compute_diff::{DiffLine, make_diff, write_diff, write_filtered_diff};
use crate::errors::{Error, ErrorKind};
use crate::header::TestProps;
use crate::read2::{Truncated, read2_abbreviated};
use crate::util::{PathBufExt, add_dylib_path, logv, static_regex};
use crate::util::{Utf8PathBufExt, add_dylib_path, logv, static_regex};
use crate::{ColorConfig, json, stamp_file_path};
mod debugger;
@ -131,7 +131,7 @@ pub fn run(config: Arc<Config>, testpaths: &TestPaths, revision: Option<&str>) {
// We're going to be dumping a lot of info. Start on a new line.
print!("\n\n");
}
debug!("running {:?}", testpaths.file.display());
debug!("running {}", testpaths.file);
let mut props = TestProps::from_file(&testpaths.file, revision, &config);
// For non-incremental (i.e. regular UI) tests, the incremental directory
@ -144,7 +144,7 @@ pub fn run(config: Arc<Config>, testpaths: &TestPaths, revision: Option<&str>) {
let cx = TestCx { config: &config, props: &props, testpaths, revision };
if let Err(e) = create_dir_all(&cx.output_base_dir()) {
panic!("failed to create output base directory {}: {e}", cx.output_base_dir().display());
panic!("failed to create output base directory {}: {e}", cx.output_base_dir());
}
if props.incremental {
@ -207,7 +207,8 @@ pub fn compute_stamp_hash(config: &Config) -> String {
format!("{:x}", hash.finish())
}
fn remove_and_create_dir_all(path: &Path) {
fn remove_and_create_dir_all(path: &Utf8Path) {
let path = path.as_std_path();
let _ = fs::remove_dir_all(path);
fs::create_dir_all(path).unwrap();
}
@ -423,7 +424,7 @@ impl<'test> TestCx<'test> {
let aux_dir = self.aux_output_dir_name();
let input: &str = match read_from {
ReadFrom::Stdin(_) => "-",
ReadFrom::Path => self.testpaths.file.to_str().unwrap(),
ReadFrom::Path => self.testpaths.file.as_str(),
};
let mut rustc = Command::new(&self.config.rustc_path);
@ -590,10 +591,7 @@ impl<'test> TestCx<'test> {
// FIXME(#65865)
return;
} else {
self.fatal(&format!(
"no error pattern specified in {:?}",
self.testpaths.file.display()
));
self.fatal(&format!("no error pattern specified in {}", self.testpaths.file));
}
}
@ -697,17 +695,17 @@ impl<'test> TestCx<'test> {
}
// On Windows, translate all '\' path separators to '/'
let file_name = format!("{}", self.testpaths.file.display()).replace(r"\", "/");
let file_name = self.testpaths.file.to_string().replace(r"\", "/");
// On Windows, keep all '\' path separators to match the paths reported in the JSON output
// from the compiler
let diagnostic_file_name = if self.props.remap_src_base {
let mut p = PathBuf::from(FAKE_SRC_BASE);
let mut p = Utf8PathBuf::from(FAKE_SRC_BASE);
p.push(&self.testpaths.relative_dir);
p.push(self.testpaths.file.file_name().unwrap());
p.display().to_string()
p.to_string()
} else {
self.testpaths.file.display().to_string()
self.testpaths.file.to_string()
};
// Errors and warnings are always expected, other diagnostics are only expected
@ -873,7 +871,7 @@ impl<'test> TestCx<'test> {
/// `root_out_dir` and `root_testpaths` refer to the parameters of the actual test being run.
/// Auxiliaries, no matter how deep, have the same root_out_dir and root_testpaths.
fn document(&self, root_out_dir: &Path, root_testpaths: &TestPaths) -> ProcRes {
fn document(&self, root_out_dir: &Utf8Path, root_testpaths: &TestPaths) -> ProcRes {
if self.props.build_aux_docs {
for rel_ab in &self.props.aux.builds {
let aux_testpaths = self.compute_aux_test_paths(root_testpaths, rel_ab);
@ -902,13 +900,13 @@ impl<'test> TestCx<'test> {
// actual --out-dir given to the auxiliary or test, as opposed to the root out dir for the entire
// test
let out_dir: Cow<'_, Path> = if self.props.unique_doc_out_dir {
let out_dir: Cow<'_, Utf8Path> = if self.props.unique_doc_out_dir {
let file_name = self.testpaths.file.file_stem().expect("file name should not be empty");
let out_dir = PathBuf::from_iter([
let out_dir = Utf8PathBuf::from_iter([
root_out_dir,
Path::new("docs"),
Path::new(file_name),
Path::new("doc"),
Utf8Path::new("docs"),
Utf8Path::new(file_name),
Utf8Path::new("doc"),
]);
create_dir_all(&out_dir).unwrap();
Cow::Owned(out_dir)
@ -921,7 +919,7 @@ impl<'test> TestCx<'test> {
rustdoc.current_dir(current_dir);
rustdoc
.arg("-L")
.arg(self.config.run_lib_path.to_str().unwrap())
.arg(self.config.run_lib_path.as_path())
.arg("-L")
.arg(aux_dir)
.arg("-o")
@ -1059,7 +1057,7 @@ impl<'test> TestCx<'test> {
let test_ab =
of.file.parent().expect("test file path has no parent").join("auxiliary").join(rel_ab);
if !test_ab.exists() {
self.fatal(&format!("aux-build `{}` source not found", test_ab.display()))
self.fatal(&format!("aux-build `{}` source not found", test_ab))
}
TestPaths {
@ -1096,7 +1094,7 @@ impl<'test> TestCx<'test> {
|| !self.props.aux.proc_macros.is_empty()
}
fn aux_output_dir(&self) -> PathBuf {
fn aux_output_dir(&self) -> Utf8PathBuf {
let aux_dir = self.aux_output_dir_name();
if !self.props.aux.builds.is_empty() {
@ -1112,7 +1110,7 @@ impl<'test> TestCx<'test> {
aux_dir
}
fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Path, rustc: &mut Command) {
fn build_all_auxiliary(&self, of: &TestPaths, aux_dir: &Utf8Path, rustc: &mut Command) {
for rel_ab in &self.props.aux.builds {
self.build_auxiliary(of, rel_ab, &aux_dir, None);
}
@ -1132,12 +1130,7 @@ impl<'test> TestCx<'test> {
|rustc: &mut Command, aux_name: &str, aux_path: &str, aux_type: AuxType| {
let lib_name = get_lib_name(&path_to_crate_name(aux_path), aux_type);
if let Some(lib_name) = lib_name {
rustc.arg("--extern").arg(format!(
"{}={}/{}",
aux_name,
aux_dir.display(),
lib_name
));
rustc.arg("--extern").arg(format!("{}={}/{}", aux_name, aux_dir, lib_name));
}
};
@ -1158,7 +1151,7 @@ impl<'test> TestCx<'test> {
let aux_type = self.build_auxiliary(of, aux_file, aux_dir, None);
if let Some(lib_name) = get_lib_name(aux_file.trim_end_matches(".rs"), aux_type) {
let lib_path = aux_dir.join(&lib_name);
rustc.arg(format!("-Zcodegen-backend={}", lib_path.display()));
rustc.arg(format!("-Zcodegen-backend={}", lib_path));
}
}
}
@ -1174,7 +1167,7 @@ impl<'test> TestCx<'test> {
if self.props.add_core_stubs {
let minicore_path = self.build_minicore();
rustc.arg("--extern");
rustc.arg(&format!("minicore={}", minicore_path.to_str().unwrap()));
rustc.arg(&format!("minicore={}", minicore_path));
}
let aux_dir = self.aux_output_dir();
@ -1192,7 +1185,7 @@ impl<'test> TestCx<'test> {
/// Builds `minicore`. Returns the path to the minicore rlib within the base test output
/// directory.
fn build_minicore(&self) -> PathBuf {
fn build_minicore(&self) -> Utf8PathBuf {
let output_file_path = self.output_base_dir().join("libminicore.rlib");
let mut rustc = self.make_compile_args(
&self.config.minicore_path,
@ -1209,10 +1202,7 @@ impl<'test> TestCx<'test> {
let res = self.compose_and_run(rustc, self.config.compile_lib_path.as_path(), None, None);
if !res.status.success() {
self.fatal_proc_rec(
&format!(
"auxiliary build of {:?} failed to compile: ",
self.config.minicore_path.display()
),
&format!("auxiliary build of {} failed to compile: ", self.config.minicore_path),
&res,
);
}
@ -1227,7 +1217,7 @@ impl<'test> TestCx<'test> {
&self,
of: &TestPaths,
source_path: &str,
aux_dir: &Path,
aux_dir: &Utf8Path,
aux_type: Option<AuxType>,
) -> AuxType {
let aux_testpaths = self.compute_aux_test_paths(of, source_path);
@ -1324,10 +1314,7 @@ impl<'test> TestCx<'test> {
);
if !auxres.status.success() {
self.fatal_proc_rec(
&format!(
"auxiliary build of {:?} failed to compile: ",
aux_testpaths.file.display()
),
&format!("auxiliary build of {} failed to compile: ", aux_testpaths.file),
&auxres,
);
}
@ -1336,8 +1323,8 @@ impl<'test> TestCx<'test> {
fn read2_abbreviated(&self, child: Child) -> (Output, Truncated) {
let mut filter_paths_from_len = Vec::new();
let mut add_path = |path: &Path| {
let path = path.display().to_string();
let mut add_path = |path: &Utf8Path| {
let path = path.to_string();
let windows = path.replace("\\", "\\\\");
if windows != path {
filter_paths_from_len.push(windows);
@ -1359,8 +1346,8 @@ impl<'test> TestCx<'test> {
fn compose_and_run(
&self,
mut command: Command,
lib_path: &Path,
aux_path: Option<&Path>,
lib_path: &Utf8Path,
aux_path: Option<&Utf8Path>,
input: Option<String>,
) -> ProcRes {
let cmdline = {
@ -1405,9 +1392,9 @@ impl<'test> TestCx<'test> {
matches!(self.config.suite.as_str(), "rustdoc-ui" | "rustdoc-js" | "rustdoc-json")
}
fn get_mir_dump_dir(&self) -> PathBuf {
fn get_mir_dump_dir(&self) -> Utf8PathBuf {
let mut mir_dump_dir = self.config.build_test_suite_root.clone();
debug!("input_file: {:?}", self.testpaths.file);
debug!("input_file: {}", self.testpaths.file);
mir_dump_dir.push(&self.testpaths.relative_dir);
mir_dump_dir.push(self.testpaths.file.file_stem().unwrap());
mir_dump_dir
@ -1415,7 +1402,7 @@ impl<'test> TestCx<'test> {
fn make_compile_args(
&self,
input_file: &Path,
input_file: &Utf8Path,
output_file: TargetLocation,
emit: Emit,
allow_unused: AllowUnused,
@ -1456,7 +1443,7 @@ impl<'test> TestCx<'test> {
// Similarly, vendored sources shouldn't be shown when running from a dist tarball.
rustc.arg("-Z").arg(format!(
"ignore-directory-in-diagnostics-source-blocks={}",
self.config.src_root.join("vendor").to_str().unwrap(),
self.config.src_root.join("vendor"),
));
// Optionally prevent default --sysroot if specified in test compile-flags.
@ -1480,7 +1467,7 @@ impl<'test> TestCx<'test> {
if !is_rustdoc {
if let Some(ref incremental_dir) = self.props.incremental_dir {
rustc.args(&["-C", &format!("incremental={}", incremental_dir.display())]);
rustc.args(&["-C", &format!("incremental={}", incremental_dir)]);
rustc.args(&["-Z", "incremental-verify-ich"]);
}
@ -1524,7 +1511,7 @@ impl<'test> TestCx<'test> {
let mir_dump_dir = self.get_mir_dump_dir();
remove_and_create_dir_all(&mir_dump_dir);
let mut dir_opt = "-Zdump-mir-dir=".to_string();
dir_opt.push_str(mir_dump_dir.to_str().unwrap());
dir_opt.push_str(mir_dump_dir.as_str());
debug!("dir_opt: {:?}", dir_opt);
rustc.arg(dir_opt);
};
@ -1617,8 +1604,7 @@ impl<'test> TestCx<'test> {
if self.props.remap_src_base {
rustc.arg(format!(
"--remap-path-prefix={}={}",
self.config.src_test_suite_root.to_str().unwrap(),
FAKE_SRC_BASE,
self.config.src_test_suite_root, FAKE_SRC_BASE,
));
}
@ -1741,7 +1727,7 @@ impl<'test> TestCx<'test> {
rustc
}
fn make_exe_name(&self) -> PathBuf {
fn make_exe_name(&self) -> Utf8PathBuf {
// Using a single letter here to keep the path length down for
// Windows. Some test names get very long. rustc creates `rcgu`
// files with the module name appended to it which can more than
@ -1792,7 +1778,7 @@ impl<'test> TestCx<'test> {
}
}
fn make_cmdline(&self, command: &Command, libpath: &Path) -> String {
fn make_cmdline(&self, command: &Command, libpath: &Utf8Path) -> String {
use crate::util;
// Linux and mac don't require adjusting the library search path
@ -1805,7 +1791,7 @@ impl<'test> TestCx<'test> {
format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
}
format!("{} {:?}", lib_path_cmd_prefix(libpath.to_str().unwrap()), command)
format!("{} {:?}", lib_path_cmd_prefix(libpath.as_str()), command)
}
}
@ -1819,20 +1805,19 @@ impl<'test> TestCx<'test> {
return;
}
let path = Path::new(proc_name);
let path = Utf8Path::new(proc_name);
let proc_name = if path.file_stem().is_some_and(|p| p == "rmake") {
OsString::from_iter(
String::from_iter(
path.parent()
.unwrap()
.file_name()
.into_iter()
.chain(Some(OsStr::new("/")))
.chain(Some("/"))
.chain(path.file_name()),
)
} else {
path.file_name().unwrap().into()
};
let proc_name = proc_name.to_string_lossy();
println!("------{proc_name} stdout------------------------------");
println!("{}", out);
println!("------{proc_name} stderr------------------------------");
@ -1842,18 +1827,18 @@ impl<'test> TestCx<'test> {
fn dump_output_file(&self, out: &str, extension: &str) {
let outfile = self.make_out_name(extension);
fs::write(&outfile, out).unwrap();
fs::write(outfile.as_std_path(), out).unwrap();
}
/// Creates a filename for output with the given extension.
/// E.g., `/.../testname.revision.mode/testname.extension`.
fn make_out_name(&self, extension: &str) -> PathBuf {
fn make_out_name(&self, extension: &str) -> Utf8PathBuf {
self.output_base_name().with_extension(extension)
}
/// Gets the directory where auxiliary files are written.
/// E.g., `/.../testname.revision.mode/auxiliary/`.
fn aux_output_dir_name(&self) -> PathBuf {
fn aux_output_dir_name(&self) -> Utf8PathBuf {
self.output_base_dir()
.join("auxiliary")
.with_extra_extension(self.config.mode.aux_dir_disambiguator())
@ -1861,12 +1846,12 @@ impl<'test> TestCx<'test> {
/// Gets the directory where auxiliary binaries are written.
/// E.g., `/.../testname.revision.mode/auxiliary/bin`.
fn aux_bin_output_dir_name(&self) -> PathBuf {
fn aux_bin_output_dir_name(&self) -> Utf8PathBuf {
self.aux_output_dir_name().join("bin")
}
/// Generates a unique name for the test, such as `testname.revision.mode`.
fn output_testname_unique(&self) -> PathBuf {
fn output_testname_unique(&self) -> Utf8PathBuf {
output_testname_unique(self.config, self.testpaths, self.safe_revision())
}
@ -1879,14 +1864,14 @@ impl<'test> TestCx<'test> {
/// Gets the absolute path to the directory where all output for the given
/// test/revision should reside.
/// E.g., `/path/to/build/host-tuple/test/ui/relative/testname.revision.mode/`.
fn output_base_dir(&self) -> PathBuf {
fn output_base_dir(&self) -> Utf8PathBuf {
output_base_dir(self.config, self.testpaths, self.safe_revision())
}
/// Gets the absolute path to the base filename used as output for the given
/// test/revision.
/// E.g., `/.../relative/testname.revision.mode/testname`.
fn output_base_name(&self) -> PathBuf {
fn output_base_name(&self) -> Utf8PathBuf {
output_base_name(self.config, self.testpaths, self.safe_revision())
}
@ -1921,7 +1906,7 @@ impl<'test> TestCx<'test> {
// codegen tests (using FileCheck)
fn compile_test_and_save_ir(&self) -> (ProcRes, PathBuf) {
fn compile_test_and_save_ir(&self) -> (ProcRes, Utf8PathBuf) {
let output_path = self.output_base_name().with_extension("ll");
let input_file = &self.testpaths.file;
let rustc = self.make_compile_args(
@ -1937,7 +1922,7 @@ impl<'test> TestCx<'test> {
(proc_res, output_path)
}
fn verify_with_filecheck(&self, output: &Path) -> ProcRes {
fn verify_with_filecheck(&self, output: &Utf8Path) -> ProcRes {
let mut filecheck = Command::new(self.config.llvm_filecheck.as_ref().unwrap());
filecheck.arg("--input-file").arg(output).arg(&self.testpaths.file);
@ -1967,7 +1952,7 @@ impl<'test> TestCx<'test> {
filecheck.args(&self.props.filecheck_flags);
// FIXME(jieyouxu): don't pass an empty Path
self.compose_and_run(filecheck, Path::new(""), None, None)
self.compose_and_run(filecheck, Utf8Path::new(""), None, None)
}
fn charset() -> &'static str {
@ -1975,7 +1960,7 @@ impl<'test> TestCx<'test> {
if cfg!(target_os = "freebsd") { "ISO-8859-1" } else { "UTF-8" }
}
fn compare_to_default_rustdoc(&mut self, out_dir: &Path) {
fn compare_to_default_rustdoc(&mut self, out_dir: &Utf8Path) {
if !self.config.has_html_tidy {
return;
}
@ -2127,12 +2112,8 @@ impl<'test> TestCx<'test> {
};
}
fn get_lines<P: AsRef<Path>>(
&self,
path: &P,
mut other_files: Option<&mut Vec<String>>,
) -> Vec<usize> {
let content = fs::read_to_string(&path).unwrap();
fn get_lines(&self, path: &Utf8Path, mut other_files: Option<&mut Vec<String>>) -> Vec<usize> {
let content = fs::read_to_string(path.as_std_path()).unwrap();
let mut ignore = false;
content
.lines()
@ -2178,8 +2159,8 @@ impl<'test> TestCx<'test> {
for other_file in other_files {
let mut path = self.testpaths.file.clone();
path.set_file_name(&format!("{}.rs", other_file));
let path = fs::canonicalize(path).expect("failed to canonicalize");
let normalized = path.to_str().unwrap().replace('\\', "/");
let path = path.canonicalize_utf8().expect("failed to canonicalize");
let normalized = path.as_str().replace('\\', "/");
files.insert(normalized, self.get_lines(&path, None));
}
@ -2363,26 +2344,24 @@ impl<'test> TestCx<'test> {
let mut normalized = output.to_string();
let mut normalize_path = |from: &Path, to: &str| {
let mut from = from.display().to_string();
if json {
from = from.replace("\\", "\\\\");
}
normalized = normalized.replace(&from, to);
let mut normalize_path = |from: &Utf8Path, to: &str| {
let from = if json { &from.as_str().replace("\\", "\\\\") } else { from.as_str() };
normalized = normalized.replace(from, to);
};
let parent_dir = self.testpaths.file.parent().unwrap();
normalize_path(parent_dir, "$DIR");
if self.props.remap_src_base {
let mut remapped_parent_dir = PathBuf::from(FAKE_SRC_BASE);
if self.testpaths.relative_dir != Path::new("") {
let mut remapped_parent_dir = Utf8PathBuf::from(FAKE_SRC_BASE);
if self.testpaths.relative_dir != Utf8Path::new("") {
remapped_parent_dir.push(&self.testpaths.relative_dir);
}
normalize_path(&remapped_parent_dir, "$DIR");
}
let base_dir = Path::new("/rustc/FAKE_PREFIX");
let base_dir = Utf8Path::new("/rustc/FAKE_PREFIX");
// Fake paths into the libstd/libcore
normalize_path(&base_dir.join("library"), "$SRC_DIR");
// `ui-fulldeps` tests can show paths to the compiler source when testing macros from
@ -2392,8 +2371,8 @@ impl<'test> TestCx<'test> {
// Real paths into the libstd/libcore
let rust_src_dir = &self.config.sysroot_base.join("lib/rustlib/src/rust");
rust_src_dir.try_exists().expect(&*format!("{} should exists", rust_src_dir.display()));
let rust_src_dir = rust_src_dir.read_link().unwrap_or(rust_src_dir.to_path_buf());
rust_src_dir.try_exists().expect(&*format!("{} should exists", rust_src_dir));
let rust_src_dir = rust_src_dir.read_link_utf8().unwrap_or(rust_src_dir.to_path_buf());
normalize_path(&rust_src_dir.join("library"), "$SRC_DIR_REAL");
// eg.
@ -2533,7 +2512,7 @@ impl<'test> TestCx<'test> {
.replace("\r\n", "\n")
}
fn expected_output_path(&self, kind: &str) -> PathBuf {
fn expected_output_path(&self, kind: &str) -> Utf8PathBuf {
let mut path =
expected_output_path(&self.testpaths, self.revision, &self.config.compare_mode, kind);
@ -2562,19 +2541,18 @@ impl<'test> TestCx<'test> {
}
}
fn load_expected_output_from_path(&self, path: &Path) -> Result<String, String> {
fs::read_to_string(path).map_err(|err| {
format!("failed to load expected output from `{}`: {}", path.display(), err)
})
fn load_expected_output_from_path(&self, path: &Utf8Path) -> Result<String, String> {
fs::read_to_string(path)
.map_err(|err| format!("failed to load expected output from `{}`: {}", path, err))
}
fn delete_file(&self, file: &Path) {
fn delete_file(&self, file: &Utf8Path) {
if !file.exists() {
// Deleting a nonexistent file would error.
return;
}
if let Err(e) = fs::remove_file(file) {
self.fatal(&format!("failed to delete `{}`: {}", file.display(), e,));
if let Err(e) = fs::remove_file(file.as_std_path()) {
self.fatal(&format!("failed to delete `{}`: {}", file, e,));
}
}
@ -2680,8 +2658,8 @@ impl<'test> TestCx<'test> {
fn show_diff(
&self,
stream: &str,
expected_path: &Path,
actual_path: &Path,
expected_path: &Utf8Path,
actual_path: &Utf8Path,
expected: &str,
actual: &str,
actual_unnormalized: &str,
@ -2820,7 +2798,7 @@ impl<'test> TestCx<'test> {
fs::create_dir_all(&incremental_dir).unwrap();
if self.config.verbose {
println!("init_incremental_test: incremental_dir={}", incremental_dir.display());
println!("init_incremental_test: incremental_dir={incremental_dir}");
}
}
}
@ -2878,8 +2856,8 @@ impl ProcRes {
#[derive(Debug)]
enum TargetLocation {
ThisFile(PathBuf),
ThisDirectory(PathBuf),
ThisFile(Utf8PathBuf),
ThisDirectory(Utf8PathBuf),
}
enum AllowUnused {

View file

@ -1,4 +1,4 @@
use std::path::PathBuf;
use camino::Utf8PathBuf;
use super::{AllowUnused, Emit, LinkToAux, ProcRes, TargetLocation, TestCx};
@ -19,7 +19,7 @@ impl TestCx<'_> {
}
}
fn compile_test_and_save_assembly(&self) -> (ProcRes, PathBuf) {
fn compile_test_and_save_assembly(&self) -> (ProcRes, Utf8PathBuf) {
// This works with both `--emit asm` (as default output name for the assembly)
// and `ptx-linker` because the latter can write output at requested location.
let output_path = self.output_base_name().with_extension("s");

View file

@ -26,9 +26,7 @@ impl TestCx<'_> {
.stdout
.lines()
.filter(|line| line.starts_with(PREFIX))
.map(|line| {
line.replace(&self.testpaths.file.display().to_string(), "TEST_PATH").to_string()
})
.map(|line| line.replace(&self.testpaths.file.as_str(), "TEST_PATH").to_string())
.map(|line| str_to_mono_item(&line, true))
.collect();

View file

@ -1,9 +1,9 @@
//! Code specific to the coverage test suites.
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf};
use glob::glob;
use crate::common::{UI_COVERAGE, UI_COVERAGE_MAP};
@ -11,7 +11,7 @@ use crate::runtest::{Emit, ProcRes, TestCx, WillExecute};
use crate::util::static_regex;
impl<'test> TestCx<'test> {
fn coverage_dump_path(&self) -> &Path {
fn coverage_dump_path(&self) -> &Utf8Path {
self.config
.coverage_dump_path
.as_deref()
@ -79,10 +79,8 @@ impl<'test> TestCx<'test> {
std::fs::remove_file(&profdata_path).unwrap();
}
let proc_res = self.exec_compiled_test_general(
&[("LLVM_PROFILE_FILE", &profraw_path.to_str().unwrap())],
false,
);
let proc_res =
self.exec_compiled_test_general(&[("LLVM_PROFILE_FILE", profraw_path.as_str())], false);
if self.props.failure_status.is_some() {
self.check_correct_failure_status(&proc_res);
} else if !proc_res.status.success() {
@ -158,8 +156,8 @@ impl<'test> TestCx<'test> {
/// `.profraw` files and doctest executables to the given vectors.
fn run_doctests_for_coverage(
&self,
profraw_paths: &mut Vec<PathBuf>,
bin_paths: &mut Vec<PathBuf>,
profraw_paths: &mut Vec<Utf8PathBuf>,
bin_paths: &mut Vec<Utf8PathBuf>,
) {
// Put .profraw files and doctest executables in dedicated directories,
// to make it easier to glob them all later.
@ -204,10 +202,9 @@ impl<'test> TestCx<'test> {
self.fatal_proc_rec("rustdoc --test failed!", &proc_res)
}
fn glob_iter(path: impl AsRef<Path>) -> impl Iterator<Item = PathBuf> {
let path_str = path.as_ref().to_str().unwrap();
let iter = glob(path_str).unwrap();
iter.map(Result::unwrap)
fn glob_iter(path: impl AsRef<Utf8Path>) -> impl Iterator<Item = Utf8PathBuf> {
let iter = glob(path.as_ref().as_str()).unwrap();
iter.map(Result::unwrap).map(Utf8PathBuf::try_from).map(Result::unwrap)
}
// Find all profraw files in the profraw directory.

View file

@ -1,7 +1,8 @@
use std::fmt::Write;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use camino::{Utf8Path, Utf8PathBuf};
use crate::common::Config;
use crate::runtest::ProcRes;
@ -15,11 +16,15 @@ pub(super) struct DebuggerCommands {
/// Contains the source line number to check and the line itself
check_lines: Vec<(usize, String)>,
/// Source file name
file: PathBuf,
file: Utf8PathBuf,
}
impl DebuggerCommands {
pub fn parse_from(file: &Path, config: &Config, debugger_prefix: &str) -> Result<Self, String> {
pub fn parse_from(
file: &Utf8Path,
config: &Config,
debugger_prefix: &str,
) -> Result<Self, String> {
let command_directive = format!("{debugger_prefix}-command");
let check_directive = format!("{debugger_prefix}-check");
@ -27,7 +32,7 @@ impl DebuggerCommands {
let mut commands = vec![];
let mut check_lines = vec![];
let mut counter = 0;
let reader = BufReader::new(File::open(file).unwrap());
let reader = BufReader::new(File::open(file.as_std_path()).unwrap());
for (line_no, line) in reader.lines().enumerate() {
counter += 1;
let line = line.map_err(|e| format!("Error while parsing debugger commands: {}", e))?;
@ -50,7 +55,7 @@ impl DebuggerCommands {
}
}
Ok(Self { commands, breakpoint_lines, check_lines, file: file.to_owned() })
Ok(Self { commands, breakpoint_lines, check_lines, file: file.to_path_buf() })
}
/// Given debugger output and lines to check, ensure that every line is
@ -81,10 +86,10 @@ impl DebuggerCommands {
if missing.is_empty() {
Ok(())
} else {
let fname = self.file.file_name().unwrap().to_string_lossy();
let fname = self.file.file_name().unwrap();
let mut msg = format!(
"check directive(s) from `{}` not found in debugger output. errors:",
self.file.display()
self.file
);
for (src_lineno, err_line) in missing {

View file

@ -1,9 +1,9 @@
use std::ffi::{OsStr, OsString};
use std::fs::File;
use std::io::{BufRead, BufReader, Read};
use std::path::Path;
use std::process::{Command, Output, Stdio};
use camino::Utf8Path;
use tracing::debug;
use super::debugger::DebuggerCommands;
@ -73,11 +73,11 @@ impl TestCx<'_> {
let mut js_extension = self.testpaths.file.clone();
js_extension.set_extension("cdb.js");
if js_extension.exists() {
script_str.push_str(&format!(".scriptload \"{}\"\n", js_extension.to_string_lossy()));
script_str.push_str(&format!(".scriptload \"{}\"\n", js_extension));
}
// Set breakpoints on every line that contains the string "#break"
let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy();
let source_file_name = self.testpaths.file.file_name().unwrap();
for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(&format!("bp `{}:{}`\n", source_file_name, line));
}
@ -151,16 +151,11 @@ impl TestCx<'_> {
if is_android_gdb_target(&self.config.target) {
cmds = cmds.replace("run", "continue");
let tool_path = match self.config.android_cross_path.to_str() {
Some(x) => x.to_owned(),
None => self.fatal("cannot find android cross path"),
};
// write debugger script
let mut script_str = String::with_capacity(2048);
script_str.push_str(&format!("set charset {}\n", Self::charset()));
script_str.push_str(&format!("set sysroot {}\n", tool_path));
script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap()));
script_str.push_str(&format!("set sysroot {}\n", &self.config.android_cross_path));
script_str.push_str(&format!("file {}\n", exe_file));
script_str.push_str("target remote :5039\n");
script_str.push_str(&format!(
"set solib-search-path \
@ -169,12 +164,8 @@ impl TestCx<'_> {
));
for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(
format!(
"break {:?}:{}\n",
self.testpaths.file.file_name().unwrap().to_string_lossy(),
*line
)
.as_str(),
format!("break {}:{}\n", self.testpaths.file.file_name().unwrap(), *line)
.as_str(),
);
}
script_str.push_str(&cmds);
@ -203,7 +194,7 @@ impl TestCx<'_> {
self.config.adb_test_dir.clone(),
if self.config.target.contains("aarch64") { "64" } else { "" },
self.config.adb_test_dir.clone(),
exe_file.file_name().unwrap().to_str().unwrap()
exe_file.file_name().unwrap()
);
debug!("adb arg: {}", adb_arg);
@ -242,7 +233,7 @@ impl TestCx<'_> {
let mut gdb = Command::new(&format!("{}-gdb", self.config.target));
gdb.args(debugger_opts);
// FIXME(jieyouxu): don't pass an empty Path
let cmdline = self.make_cmdline(&gdb, Path::new(""));
let cmdline = self.make_cmdline(&gdb, Utf8Path::new(""));
logv(self.config, format!("executing {}", cmdline));
cmdline
};
@ -259,7 +250,6 @@ impl TestCx<'_> {
}
} else {
let rust_pp_module_abs_path = self.config.src_root.join("src").join("etc");
let rust_pp_module_abs_path = rust_pp_module_abs_path.to_str().unwrap();
// write debugger script
let mut script_str = String::with_capacity(2048);
script_str.push_str(&format!("set charset {}\n", Self::charset()));
@ -274,17 +264,15 @@ impl TestCx<'_> {
// GDB's script auto loading safe path
script_str.push_str(&format!(
"add-auto-load-safe-path {}\n",
rust_pp_module_abs_path.replace(r"\", r"\\")
rust_pp_module_abs_path.as_str().replace(r"\", r"\\")
));
let output_base_dir = self.output_base_dir().to_str().unwrap().to_owned();
// Add the directory containing the output binary to
// include embedded pretty printers to GDB's script
// auto loading safe path
script_str.push_str(&format!(
"add-auto-load-safe-path {}\n",
output_base_dir.replace(r"\", r"\\")
self.output_base_dir().as_str().replace(r"\", r"\\")
));
}
}
@ -301,12 +289,13 @@ impl TestCx<'_> {
script_str.push_str("set print pretty off\n");
// Add the pretty printer directory to GDB's source-file search path
script_str
.push_str(&format!("directory {}\n", rust_pp_module_abs_path.replace(r"\", r"\\")));
script_str.push_str(&format!(
"directory {}\n",
rust_pp_module_abs_path.as_str().replace(r"\", r"\\")
));
// Load the target executable
script_str
.push_str(&format!("file {}\n", exe_file.to_str().unwrap().replace(r"\", r"\\")));
script_str.push_str(&format!("file {}\n", exe_file.as_str().replace(r"\", r"\\")));
// Force GDB to print values in the Rust format.
script_str.push_str("set language rust\n");
@ -315,7 +304,7 @@ impl TestCx<'_> {
for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(&format!(
"break '{}':{}\n",
self.testpaths.file.file_name().unwrap().to_string_lossy(),
self.testpaths.file.file_name().unwrap(),
*line
));
}
@ -410,14 +399,14 @@ impl TestCx<'_> {
script_str.push_str(&format!(
"command script import {}/lldb_lookup.py\n",
rust_pp_module_abs_path.to_str().unwrap()
rust_pp_module_abs_path
));
File::open(rust_pp_module_abs_path.join("lldb_commands"))
.and_then(|mut file| file.read_to_string(&mut script_str))
.expect("Failed to read lldb_commands");
// Set breakpoints on every line that contains the string "#break"
let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy();
let source_file_name = self.testpaths.file.file_name().unwrap();
for line in &dbg_cmds.breakpoint_lines {
script_str.push_str(&format!(
"breakpoint set --file '{}' --line {}\n",
@ -451,7 +440,7 @@ impl TestCx<'_> {
}
}
fn run_lldb(&self, test_executable: &Path, debugger_script: &Path) -> ProcRes {
fn run_lldb(&self, test_executable: &Utf8Path, debugger_script: &Utf8Path) -> ProcRes {
// Prepare the lldb_batchmode which executes the debugger script
let lldb_script_path = self.config.src_root.join("src/etc/lldb_batchmode.py");
let pythonpath = if let Ok(pp) = std::env::var("PYTHONPATH") {

View file

@ -9,8 +9,7 @@ impl TestCx<'_> {
self.document(&out_dir, &self.testpaths);
let file_stem =
self.testpaths.file.file_stem().and_then(|f| f.to_str()).expect("no file stem");
let file_stem = self.testpaths.file.file_stem().expect("no file stem");
let res = self.run_command_to_procres(
Command::new(&nodejs)
.arg(self.config.src_root.join("src/tools/rustdoc-js/tester.js"))

View file

@ -1,6 +1,6 @@
use std::fs;
use std::path::{Path, PathBuf};
use camino::{Utf8Path, Utf8PathBuf};
use glob::glob;
use miropt_test_tools::{MiroptTest, MiroptTestFile, files_for_miropt_test};
use tracing::debug;
@ -14,7 +14,7 @@ impl TestCx<'_> {
let should_run = self.should_run(pm);
let mut test_info = files_for_miropt_test(
&self.testpaths.file,
&self.testpaths.file.as_std_path(),
self.config.get_pointer_width(),
self.config.target_cfg().panic.for_miropt_test_tools(),
);
@ -38,20 +38,15 @@ impl TestCx<'_> {
fn check_mir_dump(&self, test_info: MiroptTest) {
let test_dir = self.testpaths.file.parent().unwrap();
let test_crate =
self.testpaths.file.file_stem().unwrap().to_str().unwrap().replace('-', "_");
let test_crate = self.testpaths.file.file_stem().unwrap().replace('-', "_");
let MiroptTest { run_filecheck, suffix, files, passes: _ } = test_info;
if self.config.bless {
for e in
glob(&format!("{}/{}.*{}.mir", test_dir.display(), test_crate, suffix)).unwrap()
{
for e in glob(&format!("{}/{}.*{}.mir", test_dir, test_crate, suffix)).unwrap() {
fs::remove_file(e.unwrap()).unwrap();
}
for e in
glob(&format!("{}/{}.*{}.diff", test_dir.display(), test_crate, suffix)).unwrap()
{
for e in glob(&format!("{}/{}.*{}.diff", test_dir, test_crate, suffix)).unwrap() {
fs::remove_file(e.unwrap()).unwrap();
}
}
@ -60,19 +55,15 @@ impl TestCx<'_> {
let dumped_string = if let Some(after) = to_file {
self.diff_mir_files(from_file.into(), after.into())
} else {
let mut output_file = PathBuf::new();
let mut output_file = Utf8PathBuf::new();
output_file.push(self.get_mir_dump_dir());
output_file.push(&from_file);
debug!(
"comparing the contents of: {} with {}",
output_file.display(),
expected_file.display()
);
debug!("comparing the contents of: {} with {:?}", output_file, expected_file);
if !output_file.exists() {
panic!(
"Output file `{}` from test does not exist, available files are in `{}`",
output_file.display(),
output_file.parent().unwrap().display()
output_file,
output_file.parent().unwrap()
);
}
self.check_mir_test_timestamp(&from_file, &output_file);
@ -107,21 +98,20 @@ impl TestCx<'_> {
}
}
fn diff_mir_files(&self, before: PathBuf, after: PathBuf) -> String {
let to_full_path = |path: PathBuf| {
fn diff_mir_files(&self, before: Utf8PathBuf, after: Utf8PathBuf) -> String {
let to_full_path = |path: Utf8PathBuf| {
let full = self.get_mir_dump_dir().join(&path);
if !full.exists() {
panic!(
"the mir dump file for {} does not exist (requested in {})",
path.display(),
self.testpaths.file.display(),
path, self.testpaths.file,
);
}
full
};
let before = to_full_path(before);
let after = to_full_path(after);
debug!("comparing the contents of: {} with {}", before.display(), after.display());
debug!("comparing the contents of: {} with {}", before, after);
let before = fs::read_to_string(before).unwrap();
let after = fs::read_to_string(after).unwrap();
let before = self.normalize_output(&before, &[]);
@ -138,8 +128,8 @@ impl TestCx<'_> {
dumped_string
}
fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Path) {
let t = |file| fs::metadata(file).unwrap().modified().unwrap();
fn check_mir_test_timestamp(&self, test_name: &str, output_file: &Utf8Path) {
let t = |file: &Utf8Path| fs::metadata(file.as_std_path()).unwrap().modified().unwrap();
let source_file = &self.testpaths.file;
let output_time = t(output_file);
let source_time = t(source_file);
@ -147,8 +137,7 @@ impl TestCx<'_> {
debug!("source file time: {:?} output file time: {:?}", source_time, output_time);
panic!(
"test source file `{}` is newer than potentially stale output file `{}`.",
source_file.display(),
test_name
source_file, test_name
);
}
}

View file

@ -1,8 +1,8 @@
use std::path::Path;
use std::process::{Command, Output, Stdio};
use std::{env, fs};
use build_helper::fs::{ignore_not_found, recursive_remove};
use camino::{Utf8Path, Utf8PathBuf};
use super::{ProcRes, TestCx, disable_error_reporting};
use crate::util::{copy_dir_all, dylib_env_var};
@ -39,14 +39,16 @@ impl TestCx<'_> {
// Copy all input files (apart from rmake.rs) to the temporary directory,
// so that the input directory structure from `tests/run-make/<test>` is mirrored
// to the `rmake_out` directory.
for path in walkdir::WalkDir::new(&self.testpaths.file).min_depth(1) {
let path = path.unwrap().path().to_path_buf();
for entry in walkdir::WalkDir::new(&self.testpaths.file).min_depth(1) {
let entry = entry.unwrap();
let path = entry.path();
let path = <&Utf8Path>::try_from(path).unwrap();
if path.file_name().is_some_and(|s| s != "rmake.rs") {
let target = rmake_out_dir.join(path.strip_prefix(&self.testpaths.file).unwrap());
if path.is_dir() {
copy_dir_all(&path, target).unwrap();
copy_dir_all(&path, &target).unwrap();
} else {
fs::copy(&path, target).unwrap();
fs::copy(path.as_std_path(), target).unwrap();
}
}
}
@ -83,8 +85,10 @@ impl TestCx<'_> {
// on some linux distros.
// 2. Specific library paths in `self.config.compile_lib_path` needed for running rustc.
let base_dylib_search_paths =
Vec::from_iter(env::split_paths(&env::var(dylib_env_var()).unwrap()));
let base_dylib_search_paths = Vec::from_iter(
env::split_paths(&env::var(dylib_env_var()).unwrap())
.map(|p| Utf8PathBuf::try_from(p).expect("dylib env var contains non-UTF8 paths")),
);
// Calculate the paths of the recipe binary. As previously discussed, this is placed at
// `<base_dir>/<bin_name>` with `bin_name` being `rmake` or `rmake.exe` depending on
@ -113,13 +117,13 @@ impl TestCx<'_> {
.arg("-o")
.arg(&recipe_bin)
// Specify library search paths for `run_make_support`.
.arg(format!("-Ldependency={}", &support_lib_path.parent().unwrap().to_string_lossy()))
.arg(format!("-Ldependency={}", &support_lib_deps.to_string_lossy()))
.arg(format!("-Ldependency={}", &support_lib_deps_deps.to_string_lossy()))
.arg(format!("-Ldependency={}", &support_lib_path.parent().unwrap()))
.arg(format!("-Ldependency={}", &support_lib_deps))
.arg(format!("-Ldependency={}", &support_lib_deps_deps))
// Provide `run_make_support` as extern prelude, so test writers don't need to write
// `extern run_make_support;`.
.arg("--extern")
.arg(format!("run_make_support={}", &support_lib_path.to_string_lossy()))
.arg(format!("run_make_support={}", &support_lib_path))
.arg("--edition=2021")
.arg(&self.testpaths.file.join("rmake.rs"))
.arg("-Cprefer-dynamic");
@ -240,7 +244,7 @@ impl TestCx<'_> {
if self.config.target.contains("msvc") && !self.config.cc.is_empty() {
// We need to pass a path to `lib.exe`, so assume that `cc` is `cl.exe`
// and that `lib.exe` lives next to it.
let lib = Path::new(&self.config.cc).parent().unwrap().join("lib.exe");
let lib = Utf8Path::new(&self.config.cc).parent().unwrap().join("lib.exe");
// MSYS doesn't like passing flags of the form `/foo` as it thinks it's
// a path and instead passes `C:\msys64\foo`, so convert all
@ -262,8 +266,8 @@ impl TestCx<'_> {
cmd.env("IS_MSVC", "1")
.env("IS_WINDOWS", "1")
.env("MSVC_LIB", format!("'{}' -nologo", lib.display()))
.env("MSVC_LIB_PATH", format!("{}", lib.display()))
.env("MSVC_LIB", format!("'{}' -nologo", lib))
.env("MSVC_LIB_PATH", &lib)
// Note: we diverge from legacy run_make and don't lump `CC` the compiler and
// default flags together.
.env("CC_DEFAULT_FLAGS", &cflags)

View file

@ -68,7 +68,7 @@ impl TestCx<'_> {
{
let mut coverage_file_path = self.config.build_test_suite_root.clone();
coverage_file_path.push("rustfix_missing_coverage.txt");
debug!("coverage_file_path: {}", coverage_file_path.display());
debug!("coverage_file_path: {}", coverage_file_path);
let mut file = OpenOptions::new()
.create(true)
@ -76,8 +76,8 @@ impl TestCx<'_> {
.open(coverage_file_path.as_path())
.expect("could not create or open file");
if let Err(e) = writeln!(file, "{}", self.testpaths.file.display()) {
panic!("couldn't write to {}: {e:?}", coverage_file_path.display());
if let Err(e) = writeln!(file, "{}", self.testpaths.file) {
panic!("couldn't write to {}: {e:?}", coverage_file_path);
}
}
} else if self.props.run_rustfix {
@ -119,7 +119,7 @@ impl TestCx<'_> {
self.testpaths.relative_dir.join(self.testpaths.file.file_name().unwrap());
println!(
"To only update this specific test, also pass `--test-args {}`",
relative_path_to_file.display(),
relative_path_to_file,
);
self.fatal_proc_rec(
&format!("{} errors occurred comparing output.", errors),
@ -211,8 +211,6 @@ impl TestCx<'_> {
let crate_name =
self.testpaths.file.file_stem().expect("test must have a file stem");
// crate name must be alphanumeric or `_`.
let crate_name =
crate_name.to_str().expect("crate name implies file name must be valid UTF-8");
// replace `a.foo` -> `a__foo` for crate name purposes.
// replace `revision-name-with-dashes` -> `revision_name_with_underscore`
let crate_name = crate_name.replace('.', "__");

View file

@ -1,5 +1,3 @@
use std::ffi::OsString;
use crate::debuggers::{extract_gdb_version, extract_lldb_version};
use crate::is_test;
@ -60,11 +58,11 @@ fn test_extract_lldb_version() {
#[test]
fn is_test_test() {
assert!(is_test(&OsString::from("a_test.rs")));
assert!(!is_test(&OsString::from(".a_test.rs")));
assert!(!is_test(&OsString::from("a_cat.gif")));
assert!(!is_test(&OsString::from("#a_dog_gif")));
assert!(!is_test(&OsString::from("~a_temp_file")));
assert!(is_test("a_test.rs"));
assert!(!is_test(".a_test.rs"));
assert!(!is_test("a_cat.gif"));
assert!(!is_test("#a_dog_gif"));
assert!(!is_test("~a_temp_file"));
}
#[test]

View file

@ -1,8 +1,7 @@
use std::env;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::process::Command;
use camino::{Utf8Path, Utf8PathBuf};
use tracing::*;
use crate::common::Config;
@ -34,21 +33,21 @@ pub fn logv(config: &Config, s: String) {
}
}
pub trait PathBufExt {
pub trait Utf8PathBufExt {
/// Append an extension to the path, even if it already has one.
fn with_extra_extension<S: AsRef<OsStr>>(&self, extension: S) -> PathBuf;
fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf;
}
impl PathBufExt for PathBuf {
fn with_extra_extension<S: AsRef<OsStr>>(&self, extension: S) -> PathBuf {
if extension.as_ref().is_empty() {
impl Utf8PathBufExt for Utf8PathBuf {
fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf {
if extension.is_empty() {
self.clone()
} else {
let mut fname = self.file_name().unwrap().to_os_string();
if !extension.as_ref().to_str().unwrap().starts_with('.') {
fname.push(".");
let mut fname = self.file_name().unwrap().to_string();
if !extension.starts_with('.') {
fname.push_str(".");
}
fname.push(extension);
fname.push_str(extension);
self.with_file_name(fname)
}
}
@ -71,22 +70,27 @@ pub fn dylib_env_var() -> &'static str {
/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
/// If the dylib_path_var is already set for this cmd, the old value will be overwritten!
pub fn add_dylib_path(cmd: &mut Command, paths: impl Iterator<Item = impl Into<PathBuf>>) {
pub fn add_dylib_path(
cmd: &mut Command,
paths: impl Iterator<Item = impl Into<std::path::PathBuf>>,
) {
let path_env = env::var_os(dylib_env_var());
let old_paths = path_env.as_ref().map(env::split_paths);
let new_paths = paths.map(Into::into).chain(old_paths.into_iter().flatten());
cmd.env(dylib_env_var(), env::join_paths(new_paths).unwrap());
}
pub fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
std::fs::create_dir_all(&dst)?;
for entry in std::fs::read_dir(src)? {
pub fn copy_dir_all(src: &Utf8Path, dst: &Utf8Path) -> std::io::Result<()> {
std::fs::create_dir_all(dst.as_std_path())?;
for entry in std::fs::read_dir(src.as_std_path())? {
let entry = entry?;
let path = Utf8PathBuf::try_from(entry.path()).unwrap();
let file_name = path.file_name().unwrap();
let ty = entry.file_type()?;
if ty.is_dir() {
copy_dir_all(entry.path(), dst.as_ref().join(entry.file_name()))?;
copy_dir_all(&path, &dst.join(file_name))?;
} else {
std::fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
std::fs::copy(path.as_std_path(), dst.join(file_name).as_std_path())?;
}
}
Ok(())

View file

@ -3,12 +3,12 @@ use super::*;
#[test]
fn path_buf_with_extra_extension_test() {
assert_eq!(
PathBuf::from("foo.rs.stderr"),
PathBuf::from("foo.rs").with_extra_extension("stderr")
Utf8PathBuf::from("foo.rs.stderr"),
Utf8PathBuf::from("foo.rs").with_extra_extension("stderr")
);
assert_eq!(
PathBuf::from("foo.rs.stderr"),
PathBuf::from("foo.rs").with_extra_extension(".stderr")
Utf8PathBuf::from("foo.rs.stderr"),
Utf8PathBuf::from("foo.rs").with_extra_extension(".stderr")
);
assert_eq!(PathBuf::from("foo.rs"), PathBuf::from("foo.rs").with_extra_extension(""));
assert_eq!(Utf8PathBuf::from("foo.rs"), Utf8PathBuf::from("foo.rs").with_extra_extension(""));
}

View file

@ -5,6 +5,7 @@ edition = "2021"
[dependencies]
build_helper = { path = "../../build_helper" }
camino = "1"
compiletest = { path = "../compiletest" }
getopts = "0.2"
walkdir = "2"

View file

@ -118,7 +118,11 @@ If you want to install the `browser-ui-test` dependency, run `npm install browse
..Default::default()
};
let test_props = TestProps::from_file(&librs, None, &compiletest_c);
let test_props = TestProps::from_file(
&camino::Utf8PathBuf::try_from(librs).unwrap(),
None,
&compiletest_c,
);
if !test_props.compile_flags.is_empty() {
cargo.env("RUSTDOCFLAGS", test_props.compile_flags.join(" "));

View file

@ -10,6 +10,6 @@ extern "C" {
// CHECK-LABEL: declare{{.*}}void @foo()
// CHECK-SAME: [[ATTRS:#[0-9]+]]
// CHECK-DAG: attributes [[ATTRS]] = { {{.*}}memory(none){{.*}} }
#[ffi_const]
#[unsafe(ffi_const)]
pub fn foo();
}

View file

@ -10,6 +10,6 @@ extern "C" {
// CHECK-LABEL: declare{{.*}}void @foo()
// CHECK-SAME: [[ATTRS:#[0-9]+]]
// CHECK-DAG: attributes [[ATTRS]] = { {{.*}}memory(read){{.*}} }
#[ffi_pure]
#[unsafe(ffi_pure)]
pub fn foo();
}

View file

@ -3,7 +3,6 @@
//
//@ compile-flags: -Zincremental-ignore-spans
//@ revisions: cpass cfail
//@ error-pattern: cycle detected when computing type of `Bar::N`
#![feature(trait_alias)]
#![crate_type="lib"]
@ -13,5 +12,9 @@ trait Bar<const N: usize> {}
#[cfg(cfail)]
trait Bar<const N: dyn BB> {}
//[cfail]~^ ERROR cycle detected when computing type of `Bar::N`
//[cfail]~| ERROR cycle detected when computing type of `Bar::N`
//[cfail]~| ERROR cycle detected when computing type of `Bar::N`
//[cfail]~| ERROR `(dyn Bar<{ 2 + 1 }> + 'static)` is forbidden as the type of a const generic parameter
trait BB = Bar<{ 2 + 1 }>;

View file

@ -1,8 +1,7 @@
//@ revisions: cfail1 cfail2
//@ should-ice
//@ error-pattern: delayed bug triggered by #[rustc_delayed_bug_from_inside_query]
#![feature(rustc_attrs)]
#[rustc_delayed_bug_from_inside_query]
fn main() {}
fn main() {} //~ ERROR delayed bug triggered by #[rustc_delayed_bug_from_inside_query]

View file

@ -1,5 +1,4 @@
//@ aux-build:my_lib.rs
//@ error-pattern: error: linking with
//@ revisions:cfail1 cfail2
//@ compile-flags:-Z query-dep-graph
@ -10,3 +9,5 @@
extern crate my_lib;
fn main() {}
//~? ERROR linking with

View file

@ -1,5 +1,4 @@
//@ compile-flags: --passes unknown-pass
//@ error-pattern: the `passes` flag no longer functions
#![doc(no_default_passes)]
//~^ ERROR unknown `doc` attribute `no_default_passes`

View file

@ -4,7 +4,7 @@ warning: the `passes` flag no longer functions
= help: you may want to use --document-private-items
error: unknown `doc` attribute `no_default_passes`
--> $DIR/deprecated-attrs.rs:4:8
--> $DIR/deprecated-attrs.rs:3:8
|
LL | #![doc(no_default_passes)]
| ^^^^^^^^^^^^^^^^^ no longer functions
@ -15,7 +15,7 @@ LL | #![doc(no_default_passes)]
= note: `#[deny(invalid_doc_attributes)]` on by default
error: unknown `doc` attribute `passes`
--> $DIR/deprecated-attrs.rs:11:8
--> $DIR/deprecated-attrs.rs:10:8
|
LL | #![doc(passes = "collapse-docs unindent-comments")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ no longer functions
@ -25,7 +25,7 @@ LL | #![doc(passes = "collapse-docs unindent-comments")]
= note: `doc(passes)` is now a no-op
error: unknown `doc` attribute `plugins`
--> $DIR/deprecated-attrs.rs:17:8
--> $DIR/deprecated-attrs.rs:16:8
|
LL | #![doc(plugins = "xxx")]
| ^^^^^^^^^^^^^^^ no longer functions

View file

@ -1,4 +1,4 @@
//@ compile-flags:--theme {{src-base}}/invalid-theme-name.rs
//@ error-pattern: must have a .css extension
//~? ERROR invalid argument: "$DIR/invalid-theme-name.rs"
//~? HELP must have a .css extension

View file

@ -1,11 +1,15 @@
// Test that we get the following hint when trying to use a compiler crate without rustc_driver.
//@ error-pattern: try adding `extern crate rustc_driver;` at the top level of this crate
//@ compile-flags: --emit link --error-format=human
//@ compile-flags: --emit link
//@ normalize-stderr: ".*crate .* required.*\n\n" -> ""
//@ normalize-stderr: "aborting due to [0-9]+" -> "aborting due to NUMBER"
//@ dont-require-annotations: ERROR
#![feature(rustc_private)]
extern crate rustc_serialize;
fn main() {}
//~? HELP try adding `extern crate rustc_driver;` at the top level of this crate
//~? HELP try adding `extern crate rustc_driver;` at the top level of this crate
//~? HELP try adding `extern crate rustc_driver;` at the top level of this crate

View file

@ -2,7 +2,6 @@
// Behavior on aarch64 is tested by tests/codegen/fixed-x18.rs.
//
//@ revisions: x64 i686 arm riscv32 riscv64
//@ error-pattern: the `-Zfixed-x18` flag is not supported
//@ dont-check-compiler-stderr
//
//@ compile-flags: -Zfixed-x18

View file

@ -1,5 +1,3 @@
//@ error-pattern: aborting due to 1 previous error
fn main() {
2 + +2; //~ ERROR leading `+` is not supported
}

View file

@ -1,5 +1,5 @@
error: leading `+` is not supported
--> $DIR/issue-36499.rs:4:9
--> $DIR/issue-36499.rs:2:9
|
LL | 2 + +2;
| ^ unexpected `+`

View file

@ -0,0 +1,19 @@
//@ compile-flags: -Zvalidate-mir
//@ edition: 2024
//@ build-pass
// Regression test that we don't ICE when encountering a transmute in a coroutine's
// drop shim body, which is conceptually in the Runtime phase but wasn't having the
// phase updated b/c the pass manager neither optimizes nor updates the phase for
// drop shim bodies.
struct HasDrop;
impl Drop for HasDrop {
fn drop(&mut self) {}
}
fn main() {
async {
vec![async { HasDrop }.await];
};
}

View file

@ -1,20 +1,20 @@
error: unknown start of token: `
--> <crate attribute>:1:1
--> <crate attribute>:1:4
|
LL | `%~@$#
| ^
LL | #![`%~@$#]
| ^
|
help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
|
LL - `%~@$#
LL + '%~@$#
LL - #![`%~@$#]
LL + #!['%~@$#]
|
error: expected identifier, found `%`
--> <crate attribute>:1:2
--> <crate attribute>:1:5
|
LL | `%~@$#
| ^ expected identifier
LL | #![`%~@$#]
| ^ expected identifier
error: aborting due to 2 previous errors

View file

@ -1,5 +1,3 @@
//@ compile-flags: '-Zcrate-attr=feature(yeet_expr)]fn main(){}#[inline'
fn foo() {}
//~? ERROR unexpected closing delimiter: `]`
//~? ERROR unexpected token
fn foo() {} //~ ERROR `main` function not found

View file

@ -1,8 +1,15 @@
error: unexpected closing delimiter: `]`
--> <crate attribute>:1:19
error: unexpected token: keyword `fn`
--> <crate attribute>:1:23
|
LL | feature(yeet_expr)]fn main(){}#[inline
| ^ unexpected closing delimiter
LL | #![feature(yeet_expr)]fn main(){}#[inline]
| ^^ unexpected token after this
error: aborting due to 1 previous error
error[E0601]: `main` function not found in crate `injection`
--> $DIR/injection.rs:3:12
|
LL | fn foo() {}
| ^ consider adding a `main` function to `$DIR/injection.rs`
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0601`.

View file

@ -0,0 +1,3 @@
//@ compile-flags: -Zcrate-attr=feature(yeet_expr)]#![allow(warnings)
//~? ERROR unexpected token
fn foo() {} //~ ERROR `main` function not found

View file

@ -0,0 +1,15 @@
error: unexpected token: `#`
--> <crate attribute>:1:23
|
LL | #![feature(yeet_expr)]#![allow(warnings)]
| ^ unexpected token after this
error[E0601]: `main` function not found in crate `injection2`
--> $DIR/injection2.rs:3:12
|
LL | fn foo() {}
| ^ consider adding a `main` function to `$DIR/injection2.rs`
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0601`.

View file

@ -1,8 +1,8 @@
error: expected identifier, found `#`
--> <crate attribute>:1:1
--> <crate attribute>:1:4
|
LL | #![feature(foo)]
| ^ expected identifier
LL | #![#![feature(foo)]]
| ^ expected identifier
error: aborting due to 1 previous error

View file

@ -1,5 +1,3 @@
//@ compile-flags: -Zcrate-attr=feature(foo),feature(bar)
//~? ERROR expected `]`
fn main() {}
//~? ERROR invalid crate attribute

View file

@ -1,8 +1,8 @@
error: invalid crate attribute
--> <crate attribute>:1:1
error: expected `]`, found `,`
--> <crate attribute>:1:16
|
LL | feature(foo),feature(bar)
| ^^^^^^^^^^^^^
LL | #![feature(foo),feature(bar)]
| ^ expected `]`
error: aborting due to 1 previous error

View file

@ -1,6 +1,4 @@
// Show diagnostics for unbalanced parens.
//@ compile-flags: -Zcrate-attr=(
//~? ERROR mismatched closing delimiter
fn main() {}
//~? ERROR this file contains an unclosed delimiter

View file

@ -1,10 +1,11 @@
error: this file contains an unclosed delimiter
--> <crate attribute>:1:2
error: mismatched closing delimiter: `]`
--> <crate attribute>:1:4
|
LL | (
| -^
| |
| unclosed delimiter
LL | #![(]
| -^^ mismatched closing delimiter
| ||
| |unclosed delimiter
| closing delimiter possibly meant for this
error: aborting due to 1 previous error

View file

@ -10,7 +10,7 @@
//@ [none]compile-flags: --check-cfg=cfg(feature,values())
//@ [some]compile-flags: --check-cfg=cfg(feature,values("bitcode"))
//@ [some]compile-flags: --check-cfg=cfg(CONFIG_NVME,values("y"))
//@ [none]error-pattern:Cargo.toml
//@ dont-require-annotations: HELP
#[cfg(feature = "serde")]
//~^ WARNING unexpected `cfg` condition value
@ -27,6 +27,7 @@ fn tokio() {}
#[cfg(CONFIG_NVME = "m")]
//[none]~^ WARNING unexpected `cfg` condition name
//[some]~^^ WARNING unexpected `cfg` condition value
//[none]~| HELP Cargo.toml
fn tokio() {}
fn main() {}

View file

@ -3,7 +3,6 @@
//@ compile-flags: -Cpanic=abort --print=native-static-libs
//@ build-pass
//@ error-pattern: note: native-static-libs:
//@ dont-check-compiler-stderr (libcore links `/defaultlib:msvcrt` or `/defaultlib:libcmt` on MSVC)
//@ ignore-pass (the note is emitted later in the compilation pipeline, needs build)
@ -14,3 +13,6 @@
fn panic(_info: &core::panic::PanicInfo) -> ! {
loop {}
}
//~? NOTE native-static-libs:
//~? NOTE Link against the following native artifacts when linking against this static library

View file

@ -1,3 +1,5 @@
//@ compile-flags: --error-format=human --cfg a(b=c)
//@ error-pattern: invalid `--cfg` argument: `a(b=c)` (expected `key` or `key="value"`, ensure escaping is appropriate for your shell, try 'key="value"' or key=\"value\")
//@ compile-flags: --cfg a(b=c)
fn main() {}
//~? ERROR invalid `--cfg` argument: `a(b=c)` (expected `key` or `key="value"`, ensure escaping is appropriate for your shell, try 'key="value"' or key=\"value\")

View file

@ -1,3 +1,5 @@
//@ compile-flags: --error-format=human --cfg a{b}
//@ error-pattern: invalid `--cfg` argument: `a{b}` (expected `key` or `key="value"`)
//@ compile-flags: --cfg a{b}
fn main() {}
//~? ERROR invalid `--cfg` argument: `a{b}` (expected `key` or `key="value"`)

View file

@ -1,3 +1,5 @@
//@ compile-flags: --error-format=human --cfg a(b)
//@ error-pattern: invalid `--cfg` argument: `a(b)` (expected `key` or `key="value"`)
//@ compile-flags: --cfg a(b)
fn main() {}
//~? ERROR invalid `--cfg` argument: `a(b)` (expected `key` or `key="value"`)

View file

@ -1,3 +1,5 @@
//@ compile-flags: --error-format=human --cfg a{
//@ error-pattern: invalid `--cfg` argument: `a{` (expected `key` or `key="value"`)
//@ compile-flags: --cfg a{
fn main() {}
//~? ERROR invalid `--cfg` argument: `a{` (expected `key` or `key="value"`)

View file

@ -1,3 +1,5 @@
//@ compile-flags: --error-format=human --cfg )
//@ error-pattern: invalid `--cfg` argument: `)` (expected `key` or `key="value"`)
//@ compile-flags: --cfg )
fn main() {}
//~? ERROR invalid `--cfg` argument: `)` (expected `key` or `key="value"`)

View file

@ -1,4 +1,6 @@
// Test for missing quotes around value, issue #66450.
//@ compile-flags: --error-format=human --cfg key=value
//@ error-pattern: invalid `--cfg` argument: `key=value` (expected `key` or `key="value"`, ensure escaping is appropriate for your shell, try 'key="value"' or key=\"value\")
//@ compile-flags: --cfg key=value
fn main() {}
//~? ERROR invalid `--cfg` argument: `key=value` (expected `key` or `key="value"`, ensure escaping is appropriate for your shell, try 'key="value"' or key=\"value\")

View file

@ -1,8 +1,8 @@
// Tests that empty source_maps don't ICE (#23301)
//@ compile-flags: --error-format=human --cfg ""
//@ error-pattern: invalid `--cfg` argument: `""` (expected `key` or `key="value"`)
//@ compile-flags: --cfg ""
pub fn main() {
}
//~? ERROR invalid `--cfg` argument: `""` (expected `key` or `key="value"`)

View file

@ -1,5 +1,4 @@
//@ compile-flags: -Zunleash-the-miri-inside-of-you
//@ error-pattern: calling non-const function `<Vec<i32> as Drop>::drop`
use std::mem::ManuallyDrop;
@ -15,5 +14,7 @@ static TEST_OK: () = {
static TEST_BAD: () = {
let _v: Vec<i32> = Vec::new();
}; //~ ERROR could not evaluate static initializer
//~| NOTE calling non-const function `<Vec<i32> as Drop>::drop`
//~| NOTE inside `std::ptr::drop_in_place::<Vec<i32>> - shim(Some(Vec<i32>))`
//~? WARN skipping const checks

View file

@ -1,5 +1,5 @@
error[E0080]: could not evaluate static initializer
--> $DIR/drop.rs:17:1
--> $DIR/drop.rs:16:1
|
LL | };
| ^ calling non-const function `<Vec<i32> as Drop>::drop`
@ -10,7 +10,7 @@ note: inside `std::ptr::drop_in_place::<Vec<i32>> - shim(Some(Vec<i32>))`
warning: skipping const checks
|
help: skipping check that does not even have a feature gate
--> $DIR/drop.rs:16:9
--> $DIR/drop.rs:15:9
|
LL | let _v: Vec<i32> = Vec::new();
| ^^

View file

@ -6,7 +6,6 @@
//@[zero] compile-flags: -Zdwarf-version=0
//@[one] compile-flags: -Zdwarf-version=1
//@[one] error-pattern: requested DWARF version 1 is not supported
//@[two] compile-flags: -Zdwarf-version=2
//@[two] check-pass

View file

@ -3,8 +3,9 @@
//!
//! Issue: <https://github.com/rust-lang/rust/issues/31788>
//@ error-pattern: first defined in crate `std`
//@ normalize-stderr: "loaded from .*libstd-.*.rlib" -> "loaded from SYSROOT/libstd-*.rlib"
//@ dont-require-annotations: NOTE
#![feature(lang_items)]
extern crate core;
@ -14,6 +15,7 @@ use core::panic::PanicInfo;
#[lang = "panic_impl"]
fn panic_impl(info: &PanicInfo) -> ! {
//~^ ERROR: found duplicate lang item `panic_impl`
//~| NOTE first defined in crate `std`
loop {}
}

View file

@ -1,8 +1,9 @@
error[E0152]: found duplicate lang item `panic_impl`
--> $DIR/E0152-duplicate-lang-items.rs:15:1
--> $DIR/E0152-duplicate-lang-items.rs:16:1
|
LL | / fn panic_impl(info: &PanicInfo) -> ! {
LL | |
LL | |
LL | | loop {}
LL | | }
| |_^

Some files were not shown because too many files have changed in this diff Show more