Auto merge of #138366 - matthiaskrgr:rollup-cn16m7q, r=matthiaskrgr
Rollup of 10 pull requests

Successful merges:

 - #137715 (Allow int literals for pattern types with int base types)
 - #138002 (Disable CFI for weakly linked syscalls)
 - #138051 (Add support for downloading GCC from CI)
 - #138231 (Prevent ICE in autodiff validation by emitting user-friendly errors)
 - #138245 (stabilize `ci_rustc_if_unchanged_logic` test for local environments)
 - #138256 (Do not feed anon const a type that references generics that it does not have)
 - #138284 (Do not write user type annotation for const param value path)
 - #138296 (Remove `AdtFlags::IS_ANONYMOUS` and `Copy`/`Clone` condition for anonymous ADT)
 - #138352 (miri native_calls: ensure we actually expose *mutable* provenance to the memory FFI can access)
 - #138354 (remove redundant `body` arguments)

r? `@ghost`
`@rustbot` modify labels: rollup
commit c625102320
52 changed files with 1234 additions and 395 deletions
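The last item in the rollup, #138354, is the change shown in the diff below: the borrow-check type checker already stores the MIR body it is checking (and the liveness code can reach it through `typeck.body`), so the `body: &Body<'tcx>` parameter that was threaded through `check_stmt`, `check_rvalue`, `check_terminator`, `liveness::generate`, `trace::trace`, and friends is redundant and is dropped in favour of reading `self.body`. The following is a minimal, self-contained sketch of that pattern; `Body`, `Statement`, and `TypeChecker` here are simplified stand-ins, not the real rustc-borrowck types.

```rust
// Sketch of the refactor in #138354: the checker owns a reference to the body
// it is checking, so helper methods stop taking a redundant `body` parameter.
struct Body {
    statements: Vec<Statement>,
}

struct Statement {
    text: String,
}

struct TypeChecker<'a> {
    body: &'a Body, // stored once, instead of being threaded through every helper
}

impl<'a> TypeChecker<'a> {
    // In this sketch the pre-refactor signature would have been
    // `fn check_stmt(&mut self, body: &Body, index: usize)`; afterwards the
    // body comes from `self`.
    fn check_stmt(&mut self, index: usize) {
        let stmt = &self.body.statements[index];
        println!("checking statement {index}: {}", stmt.text);
    }

    fn typeck_mir(&mut self) {
        for index in 0..self.body.statements.len() {
            self.check_stmt(index);
        }
    }
}

fn main() {
    let body = Body { statements: vec![Statement { text: "_1 = const 5_i32".into() }] };
    let mut checker = TypeChecker { body: &body };
    checker.typeck_mir();
}
```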
@@ -24,9 +24,9 @@ use crate::universal_regions::DefiningTy;
 impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
     /// Check explicit closure signature annotation,
     /// e.g., `|x: FxIndexMap<_, &'static u32>| ...`.
-    #[instrument(skip(self, body), level = "debug")]
-    pub(super) fn check_signature_annotation(&mut self, body: &Body<'tcx>) {
-        let mir_def_id = body.source.def_id().expect_local();
+    #[instrument(skip(self), level = "debug")]
+    pub(super) fn check_signature_annotation(&mut self) {
+        let mir_def_id = self.body.source.def_id().expect_local();

         if !self.tcx().is_closure_like(mir_def_id.to_def_id()) {
             return;
@@ -38,9 +38,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         // (e.g., the `_` in the code above) with fresh variables.
         // Then replace the bound items in the fn sig with fresh variables,
         // so that they represent the view from "inside" the closure.
-        let user_provided_sig = self.instantiate_canonical(body.span, &user_provided_poly_sig);
+        let user_provided_sig = self.instantiate_canonical(self.body.span, &user_provided_poly_sig);
         let mut user_provided_sig = self.infcx.instantiate_binder_with_fresh_vars(
-            body.span,
+            self.body.span,
             BoundRegionConversionTime::FnCall,
             user_provided_sig,
         );
@@ -66,12 +66,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             Ty::new_tup(self.tcx(), user_provided_sig.inputs()),
             args.tupled_upvars_ty(),
             args.coroutine_captures_by_ref_ty(),
-            self.infcx.next_region_var(RegionVariableOrigin::MiscVariable(body.span), || {
-                RegionCtxt::Unknown
-            }),
+            self.infcx
+                .next_region_var(RegionVariableOrigin::MiscVariable(self.body.span), || {
+                    RegionCtxt::Unknown
+                }),
         );

-        let next_ty_var = || self.infcx.next_ty_var(body.span);
+        let next_ty_var = || self.infcx.next_ty_var(self.body.span);
         let output_ty = Ty::new_coroutine(
             self.tcx(),
             self.tcx().coroutine_for_closure(mir_def_id),
@@ -107,9 +108,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         for (&user_ty, arg_decl) in user_provided_sig.inputs().iter().zip_eq(
             // In MIR, closure args begin with an implicit `self`.
             // Also, coroutines have a resume type which may be implicitly `()`.
-            body.args_iter()
+            self.body
+                .args_iter()
                 .skip(1 + if is_coroutine_with_implicit_resume_ty { 1 } else { 0 })
-                .map(|local| &body.local_decls[local]),
+                .map(|local| &self.body.local_decls[local]),
         ) {
             self.ascribe_user_type_skip_wf(
                 arg_decl.ty,
@@ -119,7 +121,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         }

         // If the user explicitly annotated the output type, enforce it.
-        let output_decl = &body.local_decls[RETURN_PLACE];
+        let output_decl = &self.body.local_decls[RETURN_PLACE];
         self.ascribe_user_type_skip_wf(
             output_decl.ty,
             ty::UserType::new(ty::UserTypeKind::Ty(user_provided_sig.output())),
@@ -127,12 +129,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         );
     }

-    #[instrument(skip(self, body), level = "debug")]
-    pub(super) fn equate_inputs_and_outputs(
-        &mut self,
-        body: &Body<'tcx>,
-        normalized_inputs_and_output: &[Ty<'tcx>],
-    ) {
+    #[instrument(skip(self), level = "debug")]
+    pub(super) fn equate_inputs_and_outputs(&mut self, normalized_inputs_and_output: &[Ty<'tcx>]) {
         let (&normalized_output_ty, normalized_input_tys) =
             normalized_inputs_and_output.split_last().unwrap();

@@ -141,18 +139,18 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {

         // Equate expected input tys with those in the MIR.
         for (argument_index, &normalized_input_ty) in normalized_input_tys.iter().enumerate() {
-            if argument_index + 1 >= body.local_decls.len() {
+            if argument_index + 1 >= self.body.local_decls.len() {
                 self.tcx()
                     .dcx()
-                    .span_bug(body.span, "found more normalized_input_ty than local_decls");
+                    .span_bug(self.body.span, "found more normalized_input_ty than local_decls");
             }

             // In MIR, argument N is stored in local N+1.
             let local = Local::from_usize(argument_index + 1);

-            let mir_input_ty = body.local_decls[local].ty;
+            let mir_input_ty = self.body.local_decls[local].ty;

-            let mir_input_span = body.local_decls[local].source_info.span;
+            let mir_input_span = self.body.local_decls[local].source_info.span;
             self.equate_normalized_input_or_output(
                 normalized_input_ty,
                 mir_input_ty,
@@ -160,8 +158,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             );
         }

-        if let Some(mir_yield_ty) = body.yield_ty() {
-            let yield_span = body.local_decls[RETURN_PLACE].source_info.span;
+        if let Some(mir_yield_ty) = self.body.yield_ty() {
+            let yield_span = self.body.local_decls[RETURN_PLACE].source_info.span;
             self.equate_normalized_input_or_output(
                 self.universal_regions.yield_ty.unwrap(),
                 mir_yield_ty,
@@ -169,8 +167,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
             );
         }

-        if let Some(mir_resume_ty) = body.resume_ty() {
-            let yield_span = body.local_decls[RETURN_PLACE].source_info.span;
+        if let Some(mir_resume_ty) = self.body.resume_ty() {
+            let yield_span = self.body.local_decls[RETURN_PLACE].source_info.span;
             self.equate_normalized_input_or_output(
                 self.universal_regions.resume_ty.unwrap(),
                 mir_resume_ty,
@@ -179,8 +177,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         }

         // Return types are a bit more complex. They may contain opaque `impl Trait` types.
-        let mir_output_ty = body.local_decls[RETURN_PLACE].ty;
-        let output_span = body.local_decls[RETURN_PLACE].source_info.span;
+        let mir_output_ty = self.body.local_decls[RETURN_PLACE].ty;
+        let output_span = self.body.local_decls[RETURN_PLACE].source_info.span;
         self.equate_normalized_input_or_output(normalized_output_ty, mir_output_ty, output_span);
     }

@@ -31,7 +31,6 @@ mod trace;
 /// performed before
 pub(super) fn generate<'a, 'tcx>(
     typeck: &mut TypeChecker<'_, 'tcx>,
-    body: &Body<'tcx>,
     location_map: &DenseLocationMap,
     flow_inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
     move_data: &MoveData<'tcx>,
@@ -51,23 +50,16 @@ pub(super) fn generate<'a, 'tcx>(
     // We do record these regions in the polonius context, since they're used to differentiate
     // relevant and boring locals, which is a key distinction used later in diagnostics.
     if typeck.tcx().sess.opts.unstable_opts.polonius.is_next_enabled() {
-        let (_, boring_locals) = compute_relevant_live_locals(typeck.tcx(), &free_regions, body);
+        let (_, boring_locals) =
+            compute_relevant_live_locals(typeck.tcx(), &free_regions, typeck.body);
         typeck.polonius_liveness.as_mut().unwrap().boring_nll_locals =
             boring_locals.into_iter().collect();
         free_regions = typeck.universal_regions.universal_regions_iter().collect();
     }
     let (relevant_live_locals, boring_locals) =
-        compute_relevant_live_locals(typeck.tcx(), &free_regions, body);
+        compute_relevant_live_locals(typeck.tcx(), &free_regions, typeck.body);

-    trace::trace(
-        typeck,
-        body,
-        location_map,
-        flow_inits,
-        move_data,
-        relevant_live_locals,
-        boring_locals,
-    );
+    trace::trace(typeck, location_map, flow_inits, move_data, relevant_live_locals, boring_locals);

     // Mark regions that should be live where they appear within rvalues or within a call: like
     // args, regions, and types.
@@ -76,7 +68,7 @@ pub(super) fn generate<'a, 'tcx>(
         &mut typeck.constraints.liveness_constraints,
         &typeck.universal_regions,
         &mut typeck.polonius_liveness,
-        body,
+        typeck.body,
     );
 }

@@ -39,17 +39,15 @@ use crate::type_check::{NormalizeLocation, TypeChecker};
 /// this respects `#[may_dangle]` annotations).
 pub(super) fn trace<'a, 'tcx>(
     typeck: &mut TypeChecker<'_, 'tcx>,
-    body: &Body<'tcx>,
     location_map: &DenseLocationMap,
     flow_inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
     move_data: &MoveData<'tcx>,
     relevant_live_locals: Vec<Local>,
     boring_locals: Vec<Local>,
 ) {
-    let local_use_map = &LocalUseMap::build(&relevant_live_locals, location_map, body);
+    let local_use_map = &LocalUseMap::build(&relevant_live_locals, location_map, typeck.body);
     let cx = LivenessContext {
         typeck,
-        body,
         flow_inits,
         location_map,
         local_use_map,
@@ -69,14 +67,13 @@ pub(super) fn trace<'a, 'tcx>(
 /// Contextual state for the type-liveness coroutine.
 struct LivenessContext<'a, 'typeck, 'b, 'tcx> {
     /// Current type-checker, giving us our inference context etc.
+    ///
+    /// This also stores the body we're currently analyzing.
     typeck: &'a mut TypeChecker<'typeck, 'tcx>,

     /// Defines the `PointIndex` mapping
     location_map: &'a DenseLocationMap,

-    /// MIR we are analyzing.
-    body: &'a Body<'tcx>,
-
     /// Mapping to/from the various indices used for initialization tracking.
     move_data: &'a MoveData<'tcx>,

@@ -139,7 +136,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
         self.compute_use_live_points_for(local);
         self.compute_drop_live_points_for(local);

-        let local_ty = self.cx.body.local_decls[local].ty;
+        let local_ty = self.cx.body().local_decls[local].ty;

         if !self.use_live_at.is_empty() {
             self.cx.add_use_live_facts_for(local_ty, &self.use_live_at);
@@ -164,8 +161,8 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
     /// and can therefore safely be dropped.
     fn dropck_boring_locals(&mut self, boring_locals: Vec<Local>) {
         for local in boring_locals {
-            let local_ty = self.cx.body.local_decls[local].ty;
-            let local_span = self.cx.body.local_decls[local].source_info.span;
+            let local_ty = self.cx.body().local_decls[local].ty;
+            let local_span = self.cx.body().local_decls[local].source_info.span;
             let drop_data = self.cx.drop_data.entry(local_ty).or_insert_with({
                 let typeck = &self.cx.typeck;
                 move || LivenessContext::compute_drop_data(typeck, local_ty, local_span)
@@ -173,7 +170,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {

             drop_data.dropck_result.report_overflows(
                 self.cx.typeck.infcx.tcx,
-                self.cx.body.local_decls[local].source_info.span,
+                self.cx.typeck.body.local_decls[local].source_info.span,
                 local_ty,
             );
         }
@@ -202,7 +199,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
             .var_dropped_at
             .iter()
             .filter_map(|&(local, location_index)| {
-                let local_ty = self.cx.body.local_decls[local].ty;
+                let local_ty = self.cx.body().local_decls[local].ty;
                 if relevant_live_locals.contains(&local) || !local_ty.has_free_regions() {
                     return None;
                 }
@@ -278,9 +275,9 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {

             let block = self.cx.location_map.to_location(block_start).block;
             self.stack.extend(
-                self.cx.body.basic_blocks.predecessors()[block]
+                self.cx.body().basic_blocks.predecessors()[block]
                     .iter()
-                    .map(|&pred_bb| self.cx.body.terminator_loc(pred_bb))
+                    .map(|&pred_bb| self.cx.body().terminator_loc(pred_bb))
                     .map(|pred_loc| self.cx.location_map.point_from_location(pred_loc)),
             );
         }
@@ -305,7 +302,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
         // Find the drops where `local` is initialized.
         for drop_point in self.cx.local_use_map.drops(local) {
             let location = self.cx.location_map.to_location(drop_point);
-            debug_assert_eq!(self.cx.body.terminator_loc(location.block), location,);
+            debug_assert_eq!(self.cx.body().terminator_loc(location.block), location,);

             if self.cx.initialized_at_terminator(location.block, mpi)
                 && self.drop_live_at.insert(drop_point)
@@ -351,7 +348,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
         // block. One of them may be either a definition or use
         // live point.
         let term_location = self.cx.location_map.to_location(term_point);
-        debug_assert_eq!(self.cx.body.terminator_loc(term_location.block), term_location,);
+        debug_assert_eq!(self.cx.body().terminator_loc(term_location.block), term_location,);
         let block = term_location.block;
         let entry_point = self.cx.location_map.entry_point(term_location.block);
         for p in (entry_point..term_point).rev() {
@@ -376,7 +373,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
             }
         }

-        let body = self.cx.body;
+        let body = self.cx.typeck.body;
         for &pred_block in body.basic_blocks.predecessors()[block].iter() {
             debug!("compute_drop_live_points_for_block: pred_block = {:?}", pred_block,);

@@ -403,7 +400,7 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
                 continue;
             }

-            let pred_term_loc = self.cx.body.terminator_loc(pred_block);
+            let pred_term_loc = self.cx.body().terminator_loc(pred_block);
             let pred_term_point = self.cx.location_map.point_from_location(pred_term_loc);

             // If the terminator of this predecessor either *assigns*
@@ -463,6 +460,9 @@ impl<'a, 'typeck, 'b, 'tcx> LivenessResults<'a, 'typeck, 'b, 'tcx> {
 }

 impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
+    fn body(&self) -> &Body<'tcx> {
+        self.typeck.body
+    }
     /// Returns `true` if the local variable (or some part of it) is initialized at the current
     /// cursor position. Callers should call one of the `seek` methods immediately before to point
     /// the cursor to the desired location.
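Because `LivenessContext` no longer carries its own `body` field, the hunk above adds a small accessor that forwards to the body stored in the type checker; call sites in the rest of this file become `self.cx.body()` / `self.body()`. Below is a minimal sketch of that forwarding-accessor pattern, again using simplified stand-in types rather than the real rustc ones.

```rust
// Sketch of the accessor pattern added above: the liveness context no longer
// stores the body itself, it forwards to the type checker that owns it.
struct Body {
    local_count: usize,
}

struct TypeChecker<'a> {
    body: &'a Body,
}

struct LivenessContext<'a, 'b> {
    typeck: &'a mut TypeChecker<'b>,
}

impl<'a, 'b> LivenessContext<'a, 'b> {
    // Forwarding accessor, mirroring `LivenessContext::body` in the diff above.
    fn body(&self) -> &'b Body {
        self.typeck.body
    }

    fn report(&self) {
        // Call sites read `self.body()` instead of a removed `self.body` field.
        println!("body has {} locals", self.body().local_count);
    }
}

fn main() {
    let body = Body { local_count: 3 };
    let mut typeck = TypeChecker { body: &body };
    let cx = LivenessContext { typeck: &mut typeck };
    cx.report();
}
```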
@@ -481,7 +481,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
     /// DROP of some local variable will have an effect -- note that
     /// drops, as they may unwind, are always terminators.
     fn initialized_at_terminator(&mut self, block: BasicBlock, mpi: MovePathIndex) -> bool {
-        self.flow_inits.seek_before_primary_effect(self.body.terminator_loc(block));
+        self.flow_inits.seek_before_primary_effect(self.body().terminator_loc(block));
         self.initialized_at_curr_loc(mpi)
     }

@@ -491,7 +491,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
     /// **Warning:** Does not account for the result of `Call`
     /// instructions.
     fn initialized_at_exit(&mut self, block: BasicBlock, mpi: MovePathIndex) -> bool {
-        self.flow_inits.seek_after_primary_effect(self.body.terminator_loc(block));
+        self.flow_inits.seek_after_primary_effect(self.body().terminator_loc(block));
         self.initialized_at_curr_loc(mpi)
     }

@@ -526,7 +526,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
             values::pretty_print_points(self.location_map, live_at.iter()),
         );

-        let local_span = self.body.local_decls()[dropped_local].source_info.span;
+        let local_span = self.body().local_decls()[dropped_local].source_info.span;
         let drop_data = self.drop_data.entry(dropped_ty).or_insert_with({
             let typeck = &self.typeck;
             move || Self::compute_drop_data(typeck, dropped_ty, local_span)
@@ -544,7 +544,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {

         drop_data.dropck_result.report_overflows(
             self.typeck.infcx.tcx,
-            self.body.source_info(*drop_locations.first().unwrap()).span,
+            self.typeck.body.source_info(*drop_locations.first().unwrap()).span,
             dropped_ty,
         );

@@ -174,11 +174,11 @@ pub(crate) fn type_check<'a, 'tcx>(
     let mut verifier = TypeVerifier { typeck: &mut typeck, promoted, last_span: body.span };
     verifier.visit_body(body);

-    typeck.typeck_mir(body);
-    typeck.equate_inputs_and_outputs(body, &normalized_inputs_and_output);
-    typeck.check_signature_annotation(body);
+    typeck.typeck_mir();
+    typeck.equate_inputs_and_outputs(&normalized_inputs_and_output);
+    typeck.check_signature_annotation();

-    liveness::generate(&mut typeck, body, &location_map, flow_inits, move_data);
+    liveness::generate(&mut typeck, &location_map, flow_inits, move_data);

     let opaque_type_values =
         opaque_types::take_opaques_and_register_member_constraints(&mut typeck);
@@ -485,6 +485,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {

     #[instrument(level = "debug", skip(self))]
     fn visit_body(&mut self, body: &Body<'tcx>) {
+        debug_assert!(std::ptr::eq(self.typeck.body, body));
         // We intentionally do not recurse into `body.required_consts` or
         // `body.mentioned_items` here as the MIR at this phase should still
         // refer to all items and we don't want to check them multiple times.
@@ -542,7 +543,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {

         self.visit_body(promoted_body);

-        self.typeck.typeck_mir(promoted_body);
+        self.typeck.typeck_mir();

         self.typeck.body = parent_body;
         // Merge the outlives constraints back in, at the given location.
@@ -892,8 +893,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         self.infcx.tcx
     }

-    #[instrument(skip(self, body), level = "debug")]
-    fn check_stmt(&mut self, body: &Body<'tcx>, stmt: &Statement<'tcx>, location: Location) {
+    #[instrument(skip(self), level = "debug")]
+    fn check_stmt(&mut self, stmt: &Statement<'tcx>, location: Location) {
         let tcx = self.tcx();
         debug!("stmt kind: {:?}", stmt.kind);
         match &stmt.kind {
@@ -916,11 +917,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        }
                    }
                    Some(l)
-                        if matches!(body.local_decls[l].local_info(), LocalInfo::AggregateTemp) =>
+                        if matches!(
+                            self.body.local_decls[l].local_info(),
+                            LocalInfo::AggregateTemp
+                        ) =>
                    {
                        ConstraintCategory::Usage
                    }
-                    Some(l) if !body.local_decls[l].is_user_variable() => {
+                    Some(l) if !self.body.local_decls[l].is_user_variable() => {
                        ConstraintCategory::Boring
                    }
                    _ => ConstraintCategory::Assignment,
@@ -928,14 +932,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                debug!(
                    "assignment category: {:?} {:?}",
                    category,
-                    place.as_local().map(|l| &body.local_decls[l])
+                    place.as_local().map(|l| &self.body.local_decls[l])
                );

-                let place_ty = place.ty(body, tcx).ty;
+                let place_ty = place.ty(self.body, tcx).ty;
                debug!(?place_ty);
                let place_ty = self.normalize(place_ty, location);
                debug!("place_ty normalized: {:?}", place_ty);
-                let rv_ty = rv.ty(body, tcx);
+                let rv_ty = rv.ty(self.body, tcx);
                debug!(?rv_ty);
                let rv_ty = self.normalize(rv_ty, location);
                debug!("normalized rv_ty: {:?}", rv_ty);
@@ -972,7 +976,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    }
                }

-                self.check_rvalue(body, rv, location);
+                self.check_rvalue(rv, location);
                if !self.unsized_feature_enabled() {
                    let trait_ref = ty::TraitRef::new(
                        tcx,
@@ -987,7 +991,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                }
            }
            StatementKind::AscribeUserType(box (place, projection), variance) => {
-                let place_ty = place.ty(body, tcx).ty;
+                let place_ty = place.ty(self.body, tcx).ty;
                if let Err(terr) = self.relate_type_and_user_type(
                    place_ty,
                    *variance,
@@ -1029,13 +1033,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        }
    }

-    #[instrument(skip(self, body, term_location), level = "debug")]
-    fn check_terminator(
-        &mut self,
-        body: &Body<'tcx>,
-        term: &Terminator<'tcx>,
-        term_location: Location,
-    ) {
+    #[instrument(skip(self, term_location), level = "debug")]
+    fn check_terminator(&mut self, term: &Terminator<'tcx>, term_location: Location) {
        let tcx = self.tcx();
        debug!("terminator kind: {:?}", term.kind);
        match &term.kind {
@@ -1055,7 +1054,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
            TerminatorKind::SwitchInt { discr, .. } => {
                self.check_operand(discr, term_location);

-                let switch_ty = discr.ty(body, tcx);
+                let switch_ty = discr.ty(self.body, tcx);
                if !switch_ty.is_integral() && !switch_ty.is_char() && !switch_ty.is_bool() {
                    span_mirbug!(self, term, "bad SwitchInt discr ty {:?}", switch_ty);
                }
@@ -1074,7 +1073,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    self.check_operand(&arg.node, term_location);
                }

-                let func_ty = func.ty(body, tcx);
+                let func_ty = func.ty(self.body, tcx);
                debug!("func_ty.kind: {:?}", func_ty.kind());

                let sig = match func_ty.kind() {
@@ -1142,7 +1141,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                }

                if let TerminatorKind::Call { destination, target, .. } = term.kind {
-                    self.check_call_dest(body, term, &sig, destination, target, term_location);
+                    self.check_call_dest(term, &sig, destination, target, term_location);
                }

                // The ordinary liveness rules will ensure that all
@@ -1157,21 +1156,21 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    self.constraints.liveness_constraints.add_location(region_vid, term_location);
                }

-                self.check_call_inputs(body, term, func, &sig, args, term_location, call_source);
+                self.check_call_inputs(term, func, &sig, args, term_location, call_source);
            }
            TerminatorKind::Assert { cond, msg, .. } => {
                self.check_operand(cond, term_location);

-                let cond_ty = cond.ty(body, tcx);
+                let cond_ty = cond.ty(self.body, tcx);
                if cond_ty != tcx.types.bool {
                    span_mirbug!(self, term, "bad Assert ({:?}, not bool", cond_ty);
                }

                if let AssertKind::BoundsCheck { len, index } = &**msg {
-                    if len.ty(body, tcx) != tcx.types.usize {
+                    if len.ty(self.body, tcx) != tcx.types.usize {
                        span_mirbug!(self, len, "bounds-check length non-usize {:?}", len)
                    }
-                    if index.ty(body, tcx) != tcx.types.usize {
+                    if index.ty(self.body, tcx) != tcx.types.usize {
                        span_mirbug!(self, index, "bounds-check index non-usize {:?}", index)
                    }
                }
@@ -1179,10 +1178,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
            TerminatorKind::Yield { value, resume_arg, .. } => {
                self.check_operand(value, term_location);

-                match body.yield_ty() {
+                match self.body.yield_ty() {
                    None => span_mirbug!(self, term, "yield in non-coroutine"),
                    Some(ty) => {
-                        let value_ty = value.ty(body, tcx);
+                        let value_ty = value.ty(self.body, tcx);
                        if let Err(terr) = self.sub_types(
                            value_ty,
                            ty,
@@ -1201,10 +1200,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    }
                }

-                match body.resume_ty() {
+                match self.body.resume_ty() {
                    None => span_mirbug!(self, term, "yield in non-coroutine"),
                    Some(ty) => {
-                        let resume_ty = resume_arg.ty(body, tcx);
+                        let resume_ty = resume_arg.ty(self.body, tcx);
                        if let Err(terr) = self.sub_types(
                            ty,
                            resume_ty.ty,
@@ -1228,7 +1227,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {

    fn check_call_dest(
        &mut self,
-        body: &Body<'tcx>,
        term: &Terminator<'tcx>,
        sig: &ty::FnSig<'tcx>,
        destination: Place<'tcx>,
@@ -1238,7 +1236,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        let tcx = self.tcx();
        match target {
            Some(_) => {
-                let dest_ty = destination.ty(body, tcx).ty;
+                let dest_ty = destination.ty(self.body, tcx).ty;
                let dest_ty = self.normalize(dest_ty, term_location);
                let category = match destination.as_local() {
                    Some(RETURN_PLACE) => {
@@ -1254,7 +1252,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                            ConstraintCategory::Return(ReturnConstraint::Normal)
                        }
                    }
-                    Some(l) if !body.local_decls[l].is_user_variable() => {
+                    Some(l) if !self.body.local_decls[l].is_user_variable() => {
                        ConstraintCategory::Boring
                    }
                    // The return type of a call is interesting for diagnostics.
@@ -1295,10 +1293,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        }
    }

-    #[instrument(level = "debug", skip(self, body, term, func, term_location, call_source))]
+    #[instrument(level = "debug", skip(self, term, func, term_location, call_source))]
    fn check_call_inputs(
        &mut self,
-        body: &Body<'tcx>,
        term: &Terminator<'tcx>,
        func: &Operand<'tcx>,
        sig: &ty::FnSig<'tcx>,
@@ -1310,7 +1307,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
            span_mirbug!(self, term, "call to {:?} with wrong # of args", sig);
        }

-        let func_ty = func.ty(body, self.infcx.tcx);
+        let func_ty = func.ty(self.body, self.infcx.tcx);
        if let ty::FnDef(def_id, _) = *func_ty.kind() {
            // Some of the SIMD intrinsics are special: they need a particular argument to be a
            // constant. (Eventually this should use const-generics, but those are not up for the
@@ -1334,7 +1331,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        debug!(?func_ty);

        for (n, (fn_arg, op_arg)) in iter::zip(sig.inputs(), args).enumerate() {
-            let op_arg_ty = op_arg.node.ty(body, self.tcx());
+            let op_arg_ty = op_arg.node.ty(self.body, self.tcx());

            let op_arg_ty = self.normalize(op_arg_ty, term_location);
            let category = if call_source.from_hir_call() {
@@ -1358,16 +1355,16 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        }
    }

-    fn check_iscleanup(&mut self, body: &Body<'tcx>, block_data: &BasicBlockData<'tcx>) {
+    fn check_iscleanup(&mut self, block_data: &BasicBlockData<'tcx>) {
        let is_cleanup = block_data.is_cleanup;
        self.last_span = block_data.terminator().source_info.span;
        match block_data.terminator().kind {
            TerminatorKind::Goto { target } => {
-                self.assert_iscleanup(body, block_data, target, is_cleanup)
+                self.assert_iscleanup(block_data, target, is_cleanup)
            }
            TerminatorKind::SwitchInt { ref targets, .. } => {
                for target in targets.all_targets() {
-                    self.assert_iscleanup(body, block_data, *target, is_cleanup);
+                    self.assert_iscleanup(block_data, *target, is_cleanup);
                }
            }
            TerminatorKind::UnwindResume => {
@@ -1399,55 +1396,48 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                if is_cleanup {
                    span_mirbug!(self, block_data, "yield in cleanup block")
                }
-                self.assert_iscleanup(body, block_data, resume, is_cleanup);
+                self.assert_iscleanup(block_data, resume, is_cleanup);
                if let Some(drop) = drop {
-                    self.assert_iscleanup(body, block_data, drop, is_cleanup);
+                    self.assert_iscleanup(block_data, drop, is_cleanup);
                }
            }
            TerminatorKind::Unreachable => {}
            TerminatorKind::Drop { target, unwind, .. }
            | TerminatorKind::Assert { target, unwind, .. } => {
-                self.assert_iscleanup(body, block_data, target, is_cleanup);
-                self.assert_iscleanup_unwind(body, block_data, unwind, is_cleanup);
+                self.assert_iscleanup(block_data, target, is_cleanup);
+                self.assert_iscleanup_unwind(block_data, unwind, is_cleanup);
            }
            TerminatorKind::Call { ref target, unwind, .. } => {
                if let &Some(target) = target {
-                    self.assert_iscleanup(body, block_data, target, is_cleanup);
+                    self.assert_iscleanup(block_data, target, is_cleanup);
                }
-                self.assert_iscleanup_unwind(body, block_data, unwind, is_cleanup);
+                self.assert_iscleanup_unwind(block_data, unwind, is_cleanup);
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
-                self.assert_iscleanup(body, block_data, real_target, is_cleanup);
-                self.assert_iscleanup(body, block_data, imaginary_target, is_cleanup);
+                self.assert_iscleanup(block_data, real_target, is_cleanup);
+                self.assert_iscleanup(block_data, imaginary_target, is_cleanup);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
-                self.assert_iscleanup(body, block_data, real_target, is_cleanup);
-                self.assert_iscleanup_unwind(body, block_data, unwind, is_cleanup);
+                self.assert_iscleanup(block_data, real_target, is_cleanup);
+                self.assert_iscleanup_unwind(block_data, unwind, is_cleanup);
            }
            TerminatorKind::InlineAsm { ref targets, unwind, .. } => {
                for &target in targets {
-                    self.assert_iscleanup(body, block_data, target, is_cleanup);
+                    self.assert_iscleanup(block_data, target, is_cleanup);
                }
-                self.assert_iscleanup_unwind(body, block_data, unwind, is_cleanup);
+                self.assert_iscleanup_unwind(block_data, unwind, is_cleanup);
            }
        }
    }

-    fn assert_iscleanup(
-        &mut self,
-        body: &Body<'tcx>,
-        ctxt: &dyn fmt::Debug,
-        bb: BasicBlock,
-        iscleanuppad: bool,
-    ) {
-        if body[bb].is_cleanup != iscleanuppad {
+    fn assert_iscleanup(&mut self, ctxt: &dyn fmt::Debug, bb: BasicBlock, iscleanuppad: bool) {
+        if self.body[bb].is_cleanup != iscleanuppad {
            span_mirbug!(self, ctxt, "cleanuppad mismatch: {:?} should be {:?}", bb, iscleanuppad);
        }
    }

    fn assert_iscleanup_unwind(
        &mut self,
-        body: &Body<'tcx>,
        ctxt: &dyn fmt::Debug,
        unwind: UnwindAction,
        is_cleanup: bool,
@@ -1457,7 +1447,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                if is_cleanup {
                    span_mirbug!(self, ctxt, "unwind on cleanup block")
                }
-                self.assert_iscleanup(body, ctxt, unwind, true);
+                self.assert_iscleanup(ctxt, unwind, true);
            }
            UnwindAction::Continue => {
                if is_cleanup {
@@ -1468,8 +1458,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        }
    }

-    fn check_local(&mut self, body: &Body<'tcx>, local: Local, local_decl: &LocalDecl<'tcx>) {
-        match body.local_kind(local) {
+    fn check_local(&mut self, local: Local, local_decl: &LocalDecl<'tcx>) {
+        match self.body.local_kind(local) {
            LocalKind::ReturnPointer | LocalKind::Arg => {
                // return values of normal functions are required to be
                // sized by typeck, but return values of ADT constructors are
@@ -1598,23 +1588,23 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        }
    }

-    #[instrument(skip(self, body), level = "debug")]
-    fn check_rvalue(&mut self, body: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
+    #[instrument(skip(self), level = "debug")]
+    fn check_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        let tcx = self.tcx();
-        let span = body.source_info(location).span;
+        let span = self.body.source_info(location).span;

        match rvalue {
            Rvalue::Aggregate(ak, ops) => {
                for op in ops {
                    self.check_operand(op, location);
                }
-                self.check_aggregate_rvalue(body, rvalue, ak, ops, location)
+                self.check_aggregate_rvalue(rvalue, ak, ops, location)
            }

            Rvalue::Repeat(operand, len) => {
                self.check_operand(operand, location);

-                let array_ty = rvalue.ty(body.local_decls(), tcx);
+                let array_ty = rvalue.ty(self.body.local_decls(), tcx);
                self.prove_predicate(
                    ty::PredicateKind::Clause(ty::ClauseKind::WellFormed(array_ty.into())),
                    Locations::Single(location),
@@ -1633,7 +1623,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    }
                    Operand::Move(place) => {
                        // Make sure that repeated elements implement `Copy`.
-                        let ty = place.ty(body, tcx).ty;
+                        let ty = place.ty(self.body, tcx).ty;
                        let trait_ref = ty::TraitRef::new(
                            tcx,
                            tcx.require_lang_item(LangItem::Copy, Some(span)),
@@ -1688,7 +1678,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                match *cast_kind {
                    CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, coercion_source) => {
                        let is_implicit_coercion = coercion_source == CoercionSource::Implicit;
-                        let src_ty = op.ty(body, tcx);
+                        let src_ty = op.ty(self.body, tcx);
                        let mut src_sig = src_ty.fn_sig(tcx);
                        if let ty::FnDef(def_id, _) = src_ty.kind()
                            && let ty::FnPtr(_, target_hdr) = *ty.kind()
@@ -1697,7 +1687,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                            && let Some(safe_sig) = tcx.adjust_target_feature_sig(
                                *def_id,
                                src_sig,
-                                body.source.def_id(),
+                                self.body.source.def_id(),
                            )
                        {
                            src_sig = safe_sig;
@@ -1790,7 +1780,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        PointerCoercion::ClosureFnPointer(safety),
                        coercion_source,
                    ) => {
-                        let sig = match op.ty(body, tcx).kind() {
+                        let sig = match op.ty(self.body, tcx).kind() {
                            ty::Closure(_, args) => args.as_closure().sig(),
                            _ => bug!(),
                        };
@@ -1819,7 +1809,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        PointerCoercion::UnsafeFnPointer,
                        coercion_source,
                    ) => {
-                        let fn_sig = op.ty(body, tcx).fn_sig(tcx);
+                        let fn_sig = op.ty(self.body, tcx).fn_sig(tcx);

                        // The type that we see in the fcx is like
                        // `foo::<'a, 'b>`, where `foo` is the path to a
@@ -1853,7 +1843,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        let trait_ref = ty::TraitRef::new(
                            tcx,
                            tcx.require_lang_item(LangItem::CoerceUnsized, Some(span)),
-                            [op.ty(body, tcx), ty],
+                            [op.ty(self.body, tcx), ty],
                        );

                        let is_implicit_coercion = coercion_source == CoercionSource::Implicit;
@@ -1879,7 +1869,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                            _ => panic!("Invalid dyn* cast_ty"),
                        };

-                        let self_ty = op.ty(body, tcx);
+                        let self_ty = op.ty(self.body, tcx);

                        let is_implicit_coercion = coercion_source == CoercionSource::Implicit;
                        self.prove_predicates(
@@ -1906,7 +1896,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        PointerCoercion::MutToConstPointer,
                        coercion_source,
                    ) => {
-                        let ty::RawPtr(ty_from, hir::Mutability::Mut) = op.ty(body, tcx).kind()
+                        let ty::RawPtr(ty_from, hir::Mutability::Mut) =
+                            op.ty(self.body, tcx).kind()
                        else {
                            span_mirbug!(self, rvalue, "unexpected base type for cast {:?}", ty,);
                            return;
@@ -1934,7 +1925,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    }

                    CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, coercion_source) => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);

                        let opt_ty_elem_mut = match ty_from.kind() {
                            ty::RawPtr(array_ty, array_mut) => match array_ty.kind() {
@@ -1997,7 +1988,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    }

                    CastKind::PointerExposeProvenance => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2015,7 +2006,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    }

                    CastKind::PointerWithExposedProvenance => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2032,7 +2023,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        }
                    }
                    CastKind::IntToInt => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2049,7 +2040,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        }
                    }
                    CastKind::IntToFloat => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2066,7 +2057,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        }
                    }
                    CastKind::FloatToInt => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2083,7 +2074,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        }
                    }
                    CastKind::FloatToFloat => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2100,7 +2091,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        }
                    }
                    CastKind::FnPtrToPtr => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2117,7 +2108,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        }
                    }
                    CastKind::PtrToPtr => {
-                        let ty_from = op.ty(body, tcx);
+                        let ty_from = op.ty(self.body, tcx);
                        let cast_ty_from = CastTy::from_ty(ty_from);
                        let cast_ty_to = CastTy::from_ty(*ty);
                        match (cast_ty_from, cast_ty_to) {
@@ -2193,7 +2184,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
            }

            Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
-                self.add_reborrow_constraint(body, location, *region, borrowed_place);
+                self.add_reborrow_constraint(location, *region, borrowed_place);
            }

            Rvalue::BinaryOp(
@@ -2203,12 +2194,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                self.check_operand(left, location);
                self.check_operand(right, location);

-                let ty_left = left.ty(body, tcx);
+                let ty_left = left.ty(self.body, tcx);
                match ty_left.kind() {
                    // Types with regions are comparable if they have a common super-type.
                    ty::RawPtr(_, _) | ty::FnPtr(..) => {
-                        let ty_right = right.ty(body, tcx);
-                        let common_ty = self.infcx.next_ty_var(body.source_info(location).span);
+                        let ty_right = right.ty(self.body, tcx);
+                        let common_ty =
+                            self.infcx.next_ty_var(self.body.source_info(location).span);
                        self.sub_types(
                            ty_left,
                            common_ty,
@@ -2237,7 +2229,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    // For types with no regions we can just check that the
                    // both operands have the same type.
                    ty::Int(_) | ty::Uint(_) | ty::Bool | ty::Char | ty::Float(_)
-                        if ty_left == right.ty(body, tcx) => {}
+                        if ty_left == right.ty(self.body, tcx) => {}
                    // Other types are compared by trait methods, not by
                    // `Rvalue::BinaryOp`.
                    _ => span_mirbug!(
@@ -2245,7 +2237,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                        rvalue,
                        "unexpected comparison types {:?} and {:?}",
                        ty_left,
-                        right.ty(body, tcx)
+                        right.ty(self.body, tcx)
                    ),
                }
            }
@@ -2326,7 +2318,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {

    fn check_aggregate_rvalue(
        &mut self,
-        body: &Body<'tcx>,
        rvalue: &Rvalue<'tcx>,
        aggregate_kind: &AggregateKind<'tcx>,
        operands: &IndexSlice<FieldIdx, Operand<'tcx>>,
@@ -2359,7 +2350,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                    continue;
                }
            };
-            let operand_ty = operand.ty(body, tcx);
+            let operand_ty = operand.ty(self.body, tcx);
            let operand_ty = self.normalize(operand_ty, location);

            if let Err(terr) = self.sub_types(
@@ -2389,7 +2380,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    /// - `borrowed_place`: the place `P` being borrowed
    fn add_reborrow_constraint(
        &mut self,
-        body: &Body<'tcx>,
        location: Location,
        borrow_region: ty::Region<'tcx>,
        borrowed_place: &Place<'tcx>,
@@ -2428,7 +2418,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
let def = self.body.source.def_id().expect_local();
|
let def = self.body.source.def_id().expect_local();
|
||||||
let upvars = tcx.closure_captures(def);
|
let upvars = tcx.closure_captures(def);
|
||||||
let field =
|
let field =
|
||||||
path_utils::is_upvar_field_projection(tcx, upvars, borrowed_place.as_ref(), body);
|
path_utils::is_upvar_field_projection(tcx, upvars, borrowed_place.as_ref(), self.body);
|
||||||
let category = if let Some(field) = field {
|
let category = if let Some(field) = field {
|
||||||
ConstraintCategory::ClosureUpvar(field)
|
ConstraintCategory::ClosureUpvar(field)
|
||||||
} else {
|
} else {
|
||||||
|
@ -2440,7 +2430,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||||
|
|
||||||
match elem {
|
match elem {
|
||||||
ProjectionElem::Deref => {
|
ProjectionElem::Deref => {
|
||||||
let base_ty = base.ty(body, tcx).ty;
|
let base_ty = base.ty(self.body, tcx).ty;
|
||||||
|
|
||||||
debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
|
debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
|
||||||
match base_ty.kind() {
|
match base_ty.kind() {
|
||||||
|
@ -2449,7 +2439,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||||
sup: ref_region.as_var(),
|
sup: ref_region.as_var(),
|
||||||
sub: borrow_region.as_var(),
|
sub: borrow_region.as_var(),
|
||||||
locations: location.to_locations(),
|
locations: location.to_locations(),
|
||||||
span: location.to_locations().span(body),
|
span: location.to_locations().span(self.body),
|
||||||
category,
|
category,
|
||||||
variance_info: ty::VarianceDiagInfo::default(),
|
variance_info: ty::VarianceDiagInfo::default(),
|
||||||
from_closure: false,
|
from_closure: false,
|
||||||
|
@ -2634,27 +2624,27 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
||||||
tcx.predicates_of(def_id).instantiate(tcx, args)
|
tcx.predicates_of(def_id).instantiate(tcx, args)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(self, body), level = "debug")]
|
#[instrument(skip(self), level = "debug")]
|
||||||
fn typeck_mir(&mut self, body: &Body<'tcx>) {
|
fn typeck_mir(&mut self) {
|
||||||
self.last_span = body.span;
|
self.last_span = self.body.span;
|
||||||
debug!(?body.span);
|
debug!(?self.body.span);
|
||||||
|
|
||||||
for (local, local_decl) in body.local_decls.iter_enumerated() {
|
for (local, local_decl) in self.body.local_decls.iter_enumerated() {
|
||||||
self.check_local(body, local, local_decl);
|
self.check_local(local, local_decl);
|
||||||
}
|
}
|
||||||
|
|
||||||
for (block, block_data) in body.basic_blocks.iter_enumerated() {
|
for (block, block_data) in self.body.basic_blocks.iter_enumerated() {
|
||||||
let mut location = Location { block, statement_index: 0 };
|
let mut location = Location { block, statement_index: 0 };
|
||||||
for stmt in &block_data.statements {
|
for stmt in &block_data.statements {
|
||||||
if !stmt.source_info.span.is_dummy() {
|
if !stmt.source_info.span.is_dummy() {
|
||||||
self.last_span = stmt.source_info.span;
|
self.last_span = stmt.source_info.span;
|
||||||
}
|
}
|
||||||
self.check_stmt(body, stmt, location);
|
self.check_stmt(stmt, location);
|
||||||
location.statement_index += 1;
|
location.statement_index += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
self.check_terminator(body, block_data.terminator(), location);
|
self.check_terminator(block_data.terminator(), location);
|
||||||
self.check_iscleanup(body, block_data);
|
self.check_iscleanup(block_data);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -75,9 +75,10 @@ builtin_macros_autodiff_mode = unknown Mode: `{$mode}`. Use `Forward` or `Revers
builtin_macros_autodiff_mode_activity = {$act} can not be used in {$mode} Mode
builtin_macros_autodiff_not_build = this rustc version does not support autodiff
builtin_macros_autodiff_number_activities = expected {$expected} activities, but found {$found}
+ builtin_macros_autodiff_ret_activity = invalid return activity {$act} in {$mode} Mode
builtin_macros_autodiff_ty_activity = {$act} can not be used for this type

builtin_macros_autodiff_unknown_activity = did not recognize Activity: `{$act}`

builtin_macros_bad_derive_target = `derive` may only be applied to `struct`s, `enum`s and `union`s
.label = not applicable here
.label2 = not a `struct`, `enum` or `union`

@@ -8,7 +8,8 @@ mod llvm_enzyme {
use std::string::String;

use rustc_ast::expand::autodiff_attrs::{
- AutoDiffAttrs, DiffActivity, DiffMode, valid_input_activity, valid_ty_for_activity,
+ AutoDiffAttrs, DiffActivity, DiffMode, valid_input_activity, valid_ret_activity,
+ valid_ty_for_activity,
};
use rustc_ast::ptr::P;
use rustc_ast::token::{Token, TokenKind};
@@ -577,6 +578,8 @@ mod llvm_enzyme {
//
// Error handling: If the user provides an invalid configuration (incorrect numbers, types, or
// both), we emit an error and return the original signature. This allows us to continue parsing.
+ // FIXME(Sa4dUs): make individual activities' span available so errors
+ // can point to only the activity instead of the entire attribute
fn gen_enzyme_decl(
ecx: &ExtCtxt<'_>,
sig: &ast::FnSig,
@@ -624,10 +627,22 @@ mod llvm_enzyme {
errors = true;
}
}

+ if has_ret && !valid_ret_activity(x.mode, x.ret_activity) {
+ dcx.emit_err(errors::AutoDiffInvalidRetAct {
+ span,
+ mode: x.mode.to_string(),
+ act: x.ret_activity.to_string(),
+ });
+ // We don't set `errors = true` to avoid annoying type errors relative
+ // to the expanded macro type signature
+ }

if errors {
// This is not the right signature, but we can continue parsing.
return (sig.clone(), new_inputs, idents, true);
}

let unsafe_activities = x
.input_activity
.iter()

@@ -185,6 +185,15 @@ mod autodiff {
pub(crate) act: String,
}

+ #[derive(Diagnostic)]
+ #[diag(builtin_macros_autodiff_ret_activity)]
+ pub(crate) struct AutoDiffInvalidRetAct {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) mode: String,
+ pub(crate) act: String,
+ }

#[derive(Diagnostic)]
#[diag(builtin_macros_autodiff_mode)]
pub(crate) struct AutoDiffInvalidMode {

@@ -1,9 +1,7 @@
use std::str::FromStr;

use rustc_abi::ExternAbi;
- use rustc_ast::expand::autodiff_attrs::{
- AutoDiffAttrs, DiffActivity, DiffMode, valid_input_activity, valid_ret_activity,
- };
+ use rustc_ast::expand::autodiff_attrs::{AutoDiffAttrs, DiffActivity, DiffMode};
use rustc_ast::{MetaItem, MetaItemInner, attr};
use rustc_attr_parsing::ReprAttr::ReprAlign;
use rustc_attr_parsing::{AttributeKind, InlineAttr, InstructionSetAttr, OptimizeAttr};
@@ -874,15 +872,6 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> {
}
}

- for &input in &arg_activities {
- if !valid_input_activity(mode, input) {
- span_bug!(attr.span(), "Invalid input activity {} for {} mode", input, mode);
- }
- }
- if !valid_ret_activity(mode, ret_activity) {
- span_bug!(attr.span(), "Invalid return activity {} for {} mode", ret_activity, mode);
- }

Some(AutoDiffAttrs { mode, ret_activity, input_activity: arg_activities })
}

@@ -982,6 +982,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
todo.push(id);
}
}
+ // Also expose the provenance of the interpreter-level allocation, so it can
+ // be read by FFI. The `black_box` is defensive programming as LLVM likes
+ // to (incorrectly) optimize away ptr2int casts whose result is unused.
+ std::hint::black_box(alloc.get_bytes_unchecked_raw().expose_provenance());

// Prepare for possible write from native code if mutable.
if info.mutbl.is_mut() {

@@ -2294,18 +2294,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
{
let anon_const_type = tcx.type_of(param_def_id).instantiate(tcx, args);

- // We must error if the instantiated type has any inference variables as we will
- // use this type to feed the `type_of` and query results must not contain inference
- // variables otherwise we will ICE.
+ // FIXME(generic_const_parameter_types): Ideally we remove these errors below when
+ // we have the ability to intermix typeck of anon const const args with the parent
+ // bodies typeck.
//
// We also error if the type contains any regions as effectively any region will wind
// up as a region variable in mir borrowck. It would also be somewhat concerning if
// hir typeck was using equality but mir borrowck wound up using subtyping as that could
// result in a non-infer in hir typeck but a region variable in borrowck.
- //
- // FIXME(generic_const_parameter_types): Ideally we remove these errors one day when
- // we have the ability to intermix typeck of anon const const args with the parent
- // bodies typeck.
if tcx.features().generic_const_parameter_types()
&& (anon_const_type.has_free_regions() || anon_const_type.has_erased_regions())
{
@@ -2316,6 +2312,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
tcx.feed_anon_const_type(anon.def_id, ty::EarlyBinder::bind(Ty::new_error(tcx, e)));
return ty::Const::new_error(tcx, e);
}
+ // We must error if the instantiated type has any inference variables as we will
+ // use this type to feed the `type_of` and query results must not contain inference
+ // variables otherwise we will ICE.
if anon_const_type.has_non_region_infer() {
let e = tcx.dcx().span_err(
const_arg.span(),
@@ -2324,6 +2323,16 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
tcx.feed_anon_const_type(anon.def_id, ty::EarlyBinder::bind(Ty::new_error(tcx, e)));
return ty::Const::new_error(tcx, e);
}
+ // We error when the type contains unsubstituted generics since we do not currently
+ // give the anon const any of the generics from the parent.
+ if anon_const_type.has_non_region_param() {
+ let e = tcx.dcx().span_err(
+ const_arg.span(),
+ "anonymous constants referencing generics are not yet supported",
+ );
+ tcx.feed_anon_const_type(anon.def_id, ty::EarlyBinder::bind(Ty::new_error(tcx, e)));
+ return ty::Const::new_error(tcx, e);
+ }

tcx.feed_anon_const_type(
anon.def_id,

@@ -220,6 +220,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
debug!("fcx {}", self.tag());

+ // Don't write user type annotations for const param types, since we give them
+ // identity args just so that we can trivially substitute their `EarlyBinder`.
+ // We enforce that they match their type in MIR later on.
+ if matches!(self.tcx.def_kind(def_id), DefKind::ConstParam) {
+ return;
+ }

if Self::can_contain_user_lifetime_bounds((args, user_self_ty)) {
let canonicalized = self.canonicalize_user_type_annotation(ty::UserType::new(
ty::UserTypeKind::TypeOf(def_id, UserArgs { args, user_self_ty }),

@@ -1636,7 +1636,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ast::LitKind::Char(_) => tcx.types.char,
ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => Ty::new_int(tcx, ty::int_ty(t)),
ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => Ty::new_uint(tcx, ty::uint_ty(t)),
- ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
+ ast::LitKind::Int(i, ast::LitIntType::Unsuffixed) => {
let opt_ty = expected.to_option(self).and_then(|ty| match ty.kind() {
ty::Int(_) | ty::Uint(_) => Some(ty),
// These exist to direct casts like `0x61 as char` to use
@@ -1645,6 +1645,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ty::Char => Some(tcx.types.u8),
ty::RawPtr(..) => Some(tcx.types.usize),
ty::FnDef(..) | ty::FnPtr(..) => Some(tcx.types.usize),
+ &ty::Pat(base, _) if base.is_integral() => {
+ let layout = tcx
+ .layout_of(self.typing_env(self.param_env).as_query_input(ty))
+ .ok()?;
+ assert!(!layout.uninhabited);
+
+ match layout.backend_repr {
+ rustc_abi::BackendRepr::Scalar(scalar) => {
+ scalar.valid_range(&tcx).contains(u128::from(i.get())).then_some(ty)
+ }
+ _ => unreachable!(),
+ }
+ }
_ => None,
});
opt_ty.unwrap_or_else(|| self.next_int_var())

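The hunk above is what lets an unsuffixed integer literal type-check directly against a pattern type whose base is an integer. A minimal user-facing sketch follows, assuming a recent nightly with the unstable `pattern_types` and `pattern_type_macro` feature gates and the `std::pat::pattern_type!` macro (these are unstable internals and may change; the snippet is illustrative, not part of the patch):

    #![feature(pattern_types)]
    #![feature(pattern_type_macro)]

    use std::pat::pattern_type;

    fn main() {
        // With the change above, the unsuffixed literal `5` is checked against
        // the pattern type's integer base type (and its valid range) instead of
        // being rejected.
        let nonzero: pattern_type!(u32 is 1..) = 5;
        let _ = nonzero;
    }
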
@@ -679,6 +679,11 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
// Set provenance of all bytes to wildcard.
self.provenance.write_wildcards(self.len());

+ // Also expose the provenance of the interpreter-level allocation, so it can
+ // be written by FFI. The `black_box` is defensive programming as LLVM likes
+ // to (incorrectly) optimize away ptr2int casts whose result is unused.
+ std::hint::black_box(self.get_bytes_unchecked_raw_mut().expose_provenance());

Ok(())
}

@@ -53,8 +53,6 @@ bitflags::bitflags! {
const IS_VARIANT_LIST_NON_EXHAUSTIVE = 1 << 8;
/// Indicates whether the type is `UnsafeCell`.
const IS_UNSAFE_CELL = 1 << 9;
- /// Indicates whether the type is anonymous.
- const IS_ANONYMOUS = 1 << 10;
}
}
rustc_data_structures::external_bitflags_debug! { AdtFlags }
@@ -402,12 +400,6 @@ impl<'tcx> AdtDef<'tcx> {
self.flags().contains(AdtFlags::IS_MANUALLY_DROP)
}

- /// Returns `true` if this is an anonymous adt
- #[inline]
- pub fn is_anonymous(self) -> bool {
- self.flags().contains(AdtFlags::IS_ANONYMOUS)
- }

/// Returns `true` if this type has a destructor.
pub fn has_dtor(self, tcx: TyCtxt<'tcx>) -> bool {
self.destructor(tcx).is_some()

@@ -1151,7 +1151,7 @@ pub struct VariantDef {
/// `DefId` that identifies the variant's constructor.
/// If this variant is a struct variant, then this is `None`.
pub ctor: Option<(CtorKind, DefId)>,
- /// Variant or struct name, maybe empty for anonymous adt (struct or union).
+ /// Variant or struct name.
pub name: Symbol,
/// Discriminant of this variant.
pub discr: VariantDiscr,

@@ -117,7 +117,12 @@ fn lit_to_mir_constant<'tcx>(tcx: TyCtxt<'tcx>, lit_input: LitToConstInput<'tcx>
ConstValue::Scalar(Scalar::from_uint(result, width))
};

- let value = match (lit, ty.kind()) {
+ let lit_ty = match *ty.kind() {
+ ty::Pat(base, _) => base,
+ _ => ty,
+ };
+
+ let value = match (lit, lit_ty.kind()) {
(ast::LitKind::Str(s, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_str() => {
let s = s.as_str();
let allocation = Allocation::from_bytes_byte_aligned_immutable(s.as_bytes());

@@ -207,7 +207,7 @@ impl<'tcx> ThirBuildCx<'tcx> {
&self,
hir_id: HirId,
) -> Option<ty::CanonicalUserType<'tcx>> {
- crate::thir::util::user_args_applied_to_ty_of_hir_id(self.typeck_results, hir_id)
+ crate::thir::util::user_args_applied_to_ty_of_hir_id(self.tcx, self.typeck_results, hir_id)
}
}

@@ -539,7 +539,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
&self,
hir_id: hir::HirId,
) -> Option<ty::CanonicalUserType<'tcx>> {
- crate::thir::util::user_args_applied_to_ty_of_hir_id(self.typeck_results, hir_id)
+ crate::thir::util::user_args_applied_to_ty_of_hir_id(self.tcx, self.typeck_results, hir_id)
}

/// Takes a HIR Path. If the path is a constant, evaluates it and feeds

@@ -1,12 +1,16 @@
+ use std::assert_matches::assert_matches;
+
use rustc_hir as hir;
+ use rustc_hir::def::DefKind;
use rustc_middle::bug;
- use rustc_middle::ty::{self, CanonicalUserType};
+ use rustc_middle::ty::{self, CanonicalUserType, TyCtxt};
use tracing::debug;

/// Looks up the type associated with this hir-id and applies the
/// user-given generic parameters; the hir-id must map to a suitable
/// type.
pub(crate) fn user_args_applied_to_ty_of_hir_id<'tcx>(
+ tcx: TyCtxt<'tcx>,
typeck_results: &ty::TypeckResults<'tcx>,
hir_id: hir::HirId,
) -> Option<CanonicalUserType<'tcx>> {
@@ -16,7 +20,23 @@ pub(crate) fn user_args_applied_to_ty_of_hir_id<'tcx>(
let ty = typeck_results.node_type(hir_id);
match ty.kind() {
ty::Adt(adt_def, ..) => {
+ // This "fixes" user type annotations for tupled ctor patterns for ADTs.
+ // That's because `type_of(ctor_did)` returns a FnDef, but we actually
+ // want to be annotating the type of the ADT itself. It's a bit goofy,
+ // but it's easier to adjust this here rather than in the path lowering
+ // code for patterns in HIR.
if let ty::UserTypeKind::TypeOf(did, _) = &mut user_ty.value.kind {
+ // This is either already set up correctly (struct, union, enum, or variant),
+ // or needs adjusting (ctor). Make sure we don't start adjusting other
+ // user annotations like consts or fn calls.
+ assert_matches!(
+ tcx.def_kind(*did),
+ DefKind::Ctor(..)
+ | DefKind::Struct
+ | DefKind::Enum
+ | DefKind::Union
+ | DefKind::Variant
+ );
*did = adt_def.did();
}
Some(user_ty)

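For context, a hedged, illustrative-only snippet (ordinary stable Rust, not taken from the patch) of the situation the comments above describe: the pattern `Some::<u8>(inner)` goes through a tuple constructor, so the recorded user type annotation initially names the ctor, whose `type_of` is a FnDef; the adjustment rewrites its DefId to the enum so the annotation effectively describes `Option<u8>`.

    fn main() {
        let value: Option<u8> = Some(7);
        // A tupled-ctor pattern with explicit generic arguments; HIR typeck
        // records a user type annotation for `Some::<u8>` here.
        if let Some::<u8>(inner) = value {
            assert_eq!(inner, 7);
        }
    }
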
@@ -2231,15 +2231,6 @@ impl<'tcx> SelectionContext<'_, 'tcx> {
}
}

- // `Copy` and `Clone` are automatically implemented for an anonymous adt
- // if all of its fields are `Copy` and `Clone`
- ty::Adt(adt, args) if adt.is_anonymous() => {
- // (*) binder moved here
- Where(obligation.predicate.rebind(
- adt.non_enum_variant().fields.iter().map(|f| f.ty(self.tcx(), args)).collect(),
- ))
- }

ty::Adt(..) | ty::Alias(..) | ty::Param(..) | ty::Placeholder(..) => {
// Fallback to whatever user-defined impls exist in this case.
None

@@ -220,6 +220,34 @@ fn layout_of_uncached<'tcx>(
.try_to_bits(tcx, cx.typing_env)
.ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?;

+ // FIXME(pattern_types): create implied bounds from pattern types in signatures
+ // that require that the range end is >= the range start so that we can't hit
+ // this error anymore without first having hit a trait solver error.
+ // Very fuzzy on the details here, but pattern types are an internal impl detail,
+ // so we can just go with this for now
+ if scalar.is_signed() {
+ let range = scalar.valid_range_mut();
+ let start = layout.size.sign_extend(range.start);
+ let end = layout.size.sign_extend(range.end);
+ if end < start {
+ let guar = tcx.dcx().err(format!(
+ "pattern type ranges cannot wrap: {start}..={end}"
+ ));
+
+ return Err(error(cx, LayoutError::ReferencesError(guar)));
+ }
+ } else {
+ let range = scalar.valid_range_mut();
+ if range.end < range.start {
+ let guar = tcx.dcx().err(format!(
+ "pattern type ranges cannot wrap: {}..={}",
+ range.start, range.end
+ ));
+
+ return Err(error(cx, LayoutError::ReferencesError(guar)));
+ }
+ };

let niche = Niche {
offset: Size::ZERO,
value: scalar.primitive(),

@@ -163,6 +163,16 @@
# Custom CMake defines to set when building LLVM.
#build-config = {}

+ # =============================================================================
+ # Tweaking how GCC is compiled
+ # =============================================================================
+ [gcc]
+ # Download GCC from CI instead of building it locally.
+ # Note that this will attempt to download GCC even if there are local
+ # modifications to the `src/gcc` submodule.
+ # Currently, this is only supported for the `x86_64-unknown-linux-gnu` target.
+ # download-ci-gcc = false
+
# =============================================================================
# General build configuration options
# =============================================================================

|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// FIXME(#115199): Rust currently omits weak function definitions
|
||||||
|
// and its metadata from LLVM IR.
|
||||||
|
#[cfg_attr(
|
||||||
|
any(
|
||||||
|
target_os = "android",
|
||||||
|
all(
|
||||||
|
target_os = "linux",
|
||||||
|
target_env = "gnu",
|
||||||
|
target_pointer_width = "32",
|
||||||
|
not(target_arch = "riscv32")
|
||||||
|
)
|
||||||
|
),
|
||||||
|
no_sanitize(cfi)
|
||||||
|
)]
|
||||||
pub fn set_times(&self, times: FileTimes) -> io::Result<()> {
|
pub fn set_times(&self, times: FileTimes) -> io::Result<()> {
|
||||||
#[cfg(not(any(
|
#[cfg(not(any(
|
||||||
target_os = "redox",
|
target_os = "redox",
|
||||||
|
|
|
@@ -251,6 +251,9 @@ impl FileDesc {
}

#[cfg(all(target_os = "android", target_pointer_width = "32"))]
+ // FIXME(#115199): Rust currently omits weak function definitions
+ // and its metadata from LLVM IR.
+ #[no_sanitize(cfi)]
pub fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize> {
super::weak::weak!(fn preadv64(libc::c_int, *const libc::iovec, libc::c_int, off64_t) -> isize);

@@ -434,6 +434,9 @@ impl Command {
target_os = "nto",
target_vendor = "apple",
))]
+ // FIXME(#115199): Rust currently omits weak function definitions
+ // and its metadata from LLVM IR.
+ #[cfg_attr(target_os = "linux", no_sanitize(cfi))]
fn posix_spawn(
&mut self,
stdio: &ChildPipes,

@@ -188,6 +188,9 @@ impl Thread {
}

#[cfg(any(target_os = "solaris", target_os = "illumos", target_os = "nto"))]
+ // FIXME(#115199): Rust currently omits weak function definitions
+ // and its metadata from LLVM IR.
+ #[no_sanitize(cfi)]
pub fn set_name(name: &CStr) {
weak! {
fn pthread_setname_np(

@@ -96,6 +96,17 @@ impl Timespec {
}
}

+ // FIXME(#115199): Rust currently omits weak function definitions
+ // and its metadata from LLVM IR.
+ #[cfg_attr(
+ all(
+ target_os = "linux",
+ target_env = "gnu",
+ target_pointer_width = "32",
+ not(target_arch = "riscv32")
+ ),
+ no_sanitize(cfi)
+ )]
pub fn now(clock: libc::clockid_t) -> Timespec {
use crate::mem::MaybeUninit;
use crate::sys::cvt;

@@ -144,6 +144,9 @@ unsafe fn fetch(name: &str) -> *mut libc::c_void {
#[cfg(not(any(target_os = "linux", target_os = "android")))]
pub(crate) macro syscall {
(fn $name:ident($($arg_name:ident: $t:ty),*) -> $ret:ty) => (
+ // FIXME(#115199): Rust currently omits weak function definitions
+ // and its metadata from LLVM IR.
+ #[no_sanitize(cfi)]
unsafe fn $name($($arg_name: $t),*) -> $ret {
weak! { fn $name($($t),*) -> $ret }

src/bootstrap/download-ci-gcc-stamp (new file)
@@ -0,0 +1,4 @@
+ Change this file to make users of the `download-ci-gcc` configuration download
+ a new version of GCC from CI, even if the GCC submodule hasn’t changed.
+
+ Last change is for: https://github.com/rust-lang/rust/pull/138051

@@ -14,13 +14,67 @@ use std::sync::OnceLock;

use build_helper::ci::CiEnv;

- use crate::Kind;
- use crate::core::builder::{Builder, Cargo, RunConfig, ShouldRun, Step};
+ use crate::core::builder::{Builder, Cargo, Kind, RunConfig, ShouldRun, Step};
use crate::core::config::TargetSelection;
use crate::utils::build_stamp::{BuildStamp, generate_smart_stamp_hash};
use crate::utils::exec::command;
use crate::utils::helpers::{self, t};

+ #[derive(Debug, Clone, Hash, PartialEq, Eq)]
+ pub struct Gcc {
+ pub target: TargetSelection,
+ }
+
+ #[derive(Clone)]
+ pub struct GccOutput {
+ pub libgccjit: PathBuf,
+ }
+
+ impl Step for Gcc {
+ type Output = GccOutput;
+
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ run.path("src/gcc").alias("gcc")
+ }
+
+ fn make_run(run: RunConfig<'_>) {
+ run.builder.ensure(Gcc { target: run.target });
+ }
+
+ /// Compile GCC (specifically `libgccjit`) for `target`.
+ fn run(self, builder: &Builder<'_>) -> Self::Output {
+ let target = self.target;
+
+ // If GCC has already been built, we avoid building it again.
+ let metadata = match get_gcc_build_status(builder, target) {
+ GccBuildStatus::AlreadyBuilt(path) => return GccOutput { libgccjit: path },
+ GccBuildStatus::ShouldBuild(m) => m,
+ };
+
+ let _guard = builder.msg_unstaged(Kind::Build, "GCC", target);
+ t!(metadata.stamp.remove());
+ let _time = helpers::timeit(builder);
+
+ let libgccjit_path = libgccjit_built_path(&metadata.install_dir);
+ if builder.config.dry_run() {
+ return GccOutput { libgccjit: libgccjit_path };
+ }
+
+ build_gcc(&metadata, builder, target);
+
+ let lib_alias = metadata.install_dir.join("lib/libgccjit.so.0");
+ if !lib_alias.exists() {
+ t!(builder.symlink_file(&libgccjit_path, lib_alias));
+ }
+
+ t!(metadata.stamp.write());
+
+ GccOutput { libgccjit: libgccjit_path }
+ }
+ }
+
pub struct Meta {
stamp: BuildStamp,
out_dir: PathBuf,
@@ -34,17 +88,45 @@ pub enum GccBuildStatus {
ShouldBuild(Meta),
}

- /// This returns whether we've already previously built GCC.
+ /// Tries to download GCC from CI if it is enabled and GCC artifacts
+ /// are available for the given target.
+ /// Returns a path to the libgccjit.so file.
+ #[cfg(not(test))]
+ fn try_download_gcc(builder: &Builder<'_>, target: TargetSelection) -> Option<PathBuf> {
+ // Try to download GCC from CI if configured and available
+ if !matches!(builder.config.gcc_ci_mode, crate::core::config::GccCiMode::DownloadFromCi) {
+ return None;
+ }
+ if target != "x86_64-unknown-linux-gnu" {
+ eprintln!("GCC CI download is only available for the `x86_64-unknown-linux-gnu` target");
+ return None;
+ }
+ let sha =
+ detect_gcc_sha(&builder.config, builder.config.rust_info.is_managed_git_subrepository());
+ let root = ci_gcc_root(&builder.config);
+ let gcc_stamp = BuildStamp::new(&root).with_prefix("gcc").add_stamp(&sha);
+ if !gcc_stamp.is_up_to_date() && !builder.config.dry_run() {
+ builder.config.download_ci_gcc(&sha, &root);
+ t!(gcc_stamp.write());
+ }
+ // FIXME: put libgccjit.so into a lib directory in dist::Gcc
+ Some(root.join("libgccjit.so"))
+ }
+
+ #[cfg(test)]
+ fn try_download_gcc(_builder: &Builder<'_>, _target: TargetSelection) -> Option<PathBuf> {
+ None
+ }
+
+ /// This returns information about whether GCC should be built or if it's already built.
+ /// It transparently handles downloading GCC from CI if needed.
///
/// It's used to avoid busting caches during x.py check -- if we've already built
/// GCC, it's fine for us to not try to avoid doing so.
- pub fn prebuilt_gcc_config(builder: &Builder<'_>, target: TargetSelection) -> GccBuildStatus {
- // Initialize the gcc submodule if not initialized already.
- builder.config.update_submodule("src/gcc");
+ pub fn get_gcc_build_status(builder: &Builder<'_>, target: TargetSelection) -> GccBuildStatus {
+ if let Some(path) = try_download_gcc(builder, target) {
+ return GccBuildStatus::AlreadyBuilt(path);
+ }
- let root = builder.src.join("src/gcc");
- let out_dir = builder.gcc_out(target).join("build");
- let install_dir = builder.gcc_out(target).join("install");

static STAMP_HASH_MEMO: OnceLock<String> = OnceLock::new();
let smart_stamp_hash = STAMP_HASH_MEMO.get_or_init(|| {
@@ -55,6 +137,13 @@ pub fn prebuilt_gcc_config(builder: &Builder<'_>, target: TargetSelection) -> Gc
)
});

+ // Initialize the gcc submodule if not initialized already.
+ builder.config.update_submodule("src/gcc");
+
+ let root = builder.src.join("src/gcc");
+ let out_dir = builder.gcc_out(target).join("build");
+ let install_dir = builder.gcc_out(target).join("install");
+
let stamp = BuildStamp::new(&out_dir).with_prefix("gcc").add_stamp(smart_stamp_hash);

if stamp.is_up_to_date() {
@@ -87,125 +176,72 @@ fn libgccjit_built_path(install_dir: &Path) -> PathBuf {
install_dir.join("lib/libgccjit.so")
}

- #[derive(Debug, Clone, Hash, PartialEq, Eq)]
+ fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) {
- pub struct Gcc {
+ let Meta { stamp: _, out_dir, install_dir, root } = metadata;
- pub target: TargetSelection,
- }

- #[derive(Clone)]
+ t!(fs::create_dir_all(out_dir));
- pub struct GccOutput {
+ t!(fs::create_dir_all(install_dir));
- pub libgccjit: PathBuf,
- }

- impl Step for Gcc {
+ // GCC creates files (e.g. symlinks to the downloaded dependencies)
- type Output = GccOutput;
+ // in the source directory, which does not work with our CI setup, where we mount
+ // source directories as read-only on Linux.
- const ONLY_HOSTS: bool = true;
+ // Therefore, as a part of the build in CI, we first copy the whole source directory
+ // to the build directory, and perform the build from there.
- fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
+ let src_dir = if CiEnv::is_ci() {
- run.path("src/gcc").alias("gcc")
+ let src_dir = builder.gcc_out(target).join("src");
- }
+ if src_dir.exists() {
+ builder.remove_dir(&src_dir);
- fn make_run(run: RunConfig<'_>) {
- run.builder.ensure(Gcc { target: run.target });
- }

- /// Compile GCC (specifically `libgccjit`) for `target`.
- fn run(self, builder: &Builder<'_>) -> Self::Output {
- let target = self.target;

- // If GCC has already been built, we avoid building it again.
- let Meta { stamp, out_dir, install_dir, root } = match prebuilt_gcc_config(builder, target)
- {
- GccBuildStatus::AlreadyBuilt(path) => return GccOutput { libgccjit: path },
- GccBuildStatus::ShouldBuild(m) => m,
- };

- let _guard = builder.msg_unstaged(Kind::Build, "GCC", target);
- t!(stamp.remove());
- let _time = helpers::timeit(builder);
- t!(fs::create_dir_all(&out_dir));
- t!(fs::create_dir_all(&install_dir));

- let libgccjit_path = libgccjit_built_path(&install_dir);
- if builder.config.dry_run() {
- return GccOutput { libgccjit: libgccjit_path };
}
+ builder.create_dir(&src_dir);
+ builder.cp_link_r(root, &src_dir);
+ src_dir
+ } else {
+ root.clone()
+ };

- // GCC creates files (e.g. symlinks to the downloaded dependencies)
+ command(src_dir.join("contrib/download_prerequisites")).current_dir(&src_dir).run(builder);
- // in the source directory, which does not work with our CI setup, where we mount
+ let mut configure_cmd = command(src_dir.join("configure"));
- // source directories as read-only on Linux.
+ configure_cmd
- // Therefore, as a part of the build in CI, we first copy the whole source directory
+ .current_dir(out_dir)
- // to the build directory, and perform the build from there.
+ // On CI, we compile GCC with Clang.
- let src_dir = if CiEnv::is_ci() {
+ // The -Wno-everything flag is needed to make GCC compile with Clang 19.
- let src_dir = builder.gcc_out(target).join("src");
+ // `-g -O2` are the default flags that are otherwise used by Make.
- if src_dir.exists() {
+ // FIXME(kobzol): change the flags once we have [gcc] configuration in config.toml.
- builder.remove_dir(&src_dir);
+ .env("CXXFLAGS", "-Wno-everything -g -O2")
- }
+ .env("CFLAGS", "-Wno-everything -g -O2")
- builder.create_dir(&src_dir);
+ .arg("--enable-host-shared")
- builder.cp_link_r(&root, &src_dir);
+ .arg("--enable-languages=jit")
- src_dir
+ .arg("--enable-checking=release")
- } else {
+ .arg("--disable-bootstrap")
- root
+ .arg("--disable-multilib")
- };
+ .arg(format!("--prefix={}", install_dir.display()));
+ let cc = builder.build.cc(target).display().to_string();
+ let cc = builder
+ .build
+ .config
+ .ccache
+ .as_ref()
+ .map_or_else(|| cc.clone(), |ccache| format!("{ccache} {cc}"));
+ configure_cmd.env("CC", cc);

- command(src_dir.join("contrib/download_prerequisites")).current_dir(&src_dir).run(builder);
+ if let Ok(ref cxx) = builder.build.cxx(target) {
- let mut configure_cmd = command(src_dir.join("configure"));
+ let cxx = cxx.display().to_string();
- configure_cmd
+ let cxx = builder
- .current_dir(&out_dir)
- // On CI, we compile GCC with Clang.
- // The -Wno-everything flag is needed to make GCC compile with Clang 19.
- // `-g -O2` are the default flags that are otherwise used by Make.
- // FIXME(kobzol): change the flags once we have [gcc] configuration in config.toml.
- .env("CXXFLAGS", "-Wno-everything -g -O2")
- .env("CFLAGS", "-Wno-everything -g -O2")
- .arg("--enable-host-shared")
- .arg("--enable-languages=jit")
- .arg("--enable-checking=release")
- .arg("--disable-bootstrap")
- .arg("--disable-multilib")
- .arg(format!("--prefix={}", install_dir.display()));
- let cc = builder.build.cc(target).display().to_string();
- let cc = builder
.build
.config
.ccache
.as_ref()
- .map_or_else(|| cc.clone(), |ccache| format!("{ccache} {cc}"));
+ .map_or_else(|| cxx.clone(), |ccache| format!("{ccache} {cxx}"));
- configure_cmd.env("CC", cc);
+ configure_cmd.env("CXX", cxx);

- if let Ok(ref cxx) = builder.build.cxx(target) {
- let cxx = cxx.display().to_string();
- let cxx = builder
- .build
- .config
- .ccache
- .as_ref()
- .map_or_else(|| cxx.clone(), |ccache| format!("{ccache} {cxx}"));
- configure_cmd.env("CXX", cxx);
- }
- configure_cmd.run(builder);

- command("make")
- .current_dir(&out_dir)
- .arg("--silent")
- .arg(format!("-j{}", builder.jobs()))
- .run_capture_stdout(builder);
- command("make")
- .current_dir(&out_dir)
- .arg("--silent")
- .arg("install")
- .run_capture_stdout(builder);

- let lib_alias = install_dir.join("lib/libgccjit.so.0");
- if !lib_alias.exists() {
- t!(builder.symlink_file(&libgccjit_path, lib_alias));
- }

- t!(stamp.write());

- GccOutput { libgccjit: libgccjit_path }
}
+ configure_cmd.run(builder);

+ command("make")
+ .current_dir(out_dir)
+ .arg("--silent")
+ .arg(format!("-j{}", builder.jobs()))
+ .run_capture_stdout(builder);
+ command("make").current_dir(out_dir).arg("--silent").arg("install").run_capture_stdout(builder);
}

/// Configures a Cargo invocation so that it can build the GCC codegen backend.
@@ -213,3 +249,38 @@ pub fn add_cg_gcc_cargo_flags(cargo: &mut Cargo, gcc: &GccOutput) {
// Add the path to libgccjit.so to the linker search paths.
cargo.rustflag(&format!("-L{}", gcc.libgccjit.parent().unwrap().to_str().unwrap()));
}

+ /// The absolute path to the downloaded GCC artifacts.
+ #[cfg(not(test))]
+ fn ci_gcc_root(config: &crate::Config) -> PathBuf {
+ config.out.join(config.build).join("ci-gcc")
+ }
+
+ /// This retrieves the GCC sha we *want* to use, according to git history.
+ #[cfg(not(test))]
+ fn detect_gcc_sha(config: &crate::Config, is_git: bool) -> String {
+ use build_helper::git::get_closest_merge_commit;
+
+ let gcc_sha = if is_git {
+ get_closest_merge_commit(
+ Some(&config.src),
+ &config.git_config(),
+ &[config.src.join("src/gcc"), config.src.join("src/bootstrap/download-ci-gcc-stamp")],
+ )
+ .unwrap()
+ } else if let Some(info) = crate::utils::channel::read_commit_info_file(&config.src) {
+ info.sha.trim().to_owned()
+ } else {
+ "".to_owned()
+ };
+
+ if gcc_sha.is_empty() {
+ eprintln!("error: could not find commit hash for downloading GCC");
+ eprintln!("HELP: maybe your repository history is too shallow?");
+ eprintln!("HELP: consider disabling `download-ci-gcc`");
+ eprintln!("HELP: or fetch enough history to include one upstream commit");
+ panic!();
+ }
+
+ gcc_sha
+ }

@@ -261,8 +261,14 @@ fn ci_rustc_if_unchanged_logic() {
// Make sure "if-unchanged" logic doesn't try to use CI rustc while there are changes
// in compiler and/or library.
if config.download_rustc_commit.is_some() {
- let has_changes =
- config.last_modified_commit(&["compiler", "library"], "download-rustc", true).is_none();
+ let mut paths = vec!["compiler"];
+
+ // Handle library tree the same way as in `Config::download_ci_rustc_commit`.
+ if build_helper::ci::CiEnv::is_ci() {
+ paths.push("library");
+ }
+
+ let has_changes = config.last_modified_commit(&paths, "download-rustc", true).is_none();

assert!(
!has_changes,

@@ -171,6 +171,17 @@ impl LldMode {
}
}

+ /// Determines how will GCC be provided.
+ #[derive(Default, Clone)]
+ pub enum GccCiMode {
+ /// Build GCC from the local `src/gcc` submodule.
+ #[default]
+ BuildLocally,
+ /// Try to download GCC from CI.
+ /// If it is not available on CI, it will be built locally instead.
+ DownloadFromCi,
+ }
+
/// Global configuration for the entire build and/or bootstrap.
///
/// This structure is parsed from `config.toml`, and some of the fields are inferred from `git` or build-time parameters.
@@ -283,6 +294,9 @@ pub struct Config {
pub llvm_ldflags: Option<String>,
pub llvm_use_libcxx: bool,

+ // gcc codegen options
+ pub gcc_ci_mode: GccCiMode,
+
// rust codegen options
pub rust_optimize: RustOptimize,
pub rust_codegen_units: Option<u32>,
@@ -676,6 +690,7 @@ pub(crate) struct TomlConfig {
build: Option<Build>,
install: Option<Install>,
llvm: Option<Llvm>,
+ gcc: Option<Gcc>,
rust: Option<Rust>,
target: Option<HashMap<String, TomlTarget>>,
dist: Option<Dist>,
@@ -710,7 +725,7 @@ trait Merge {
impl Merge for TomlConfig {
fn merge(
&mut self,
- TomlConfig { build, install, llvm, rust, dist, target, profile, change_id }: Self,
+ TomlConfig { build, install, llvm, gcc, rust, dist, target, profile, change_id }: Self,
replace: ReplaceOpt,
) {
fn do_merge<T: Merge>(x: &mut Option<T>, y: Option<T>, replace: ReplaceOpt) {
@@ -729,6 +744,7 @@ impl Merge for TomlConfig {
do_merge(&mut self.build, build, replace);
do_merge(&mut self.install, install, replace);
do_merge(&mut self.llvm, llvm, replace);
+ do_merge(&mut self.gcc, gcc, replace);
do_merge(&mut self.rust, rust, replace);
do_merge(&mut self.dist, dist, replace);

@@ -995,6 +1011,13 @@ define_config! {
}
}

+ define_config! {
+ /// TOML representation of how the GCC build is configured.
+ struct Gcc {
+ download_ci_gcc: Option<bool> = "download-ci-gcc",
+ }
+ }
+
define_config! {
struct Dist {
sign_folder: Option<String> = "sign-folder",
@@ -2136,6 +2159,16 @@ impl Config {
config.llvm_from_ci = config.parse_download_ci_llvm(None, false);
}

+ if let Some(gcc) = toml.gcc {
+ config.gcc_ci_mode = match gcc.download_ci_gcc {
+ Some(value) => match value {
+ true => GccCiMode::DownloadFromCi,
+ false => GccCiMode::BuildLocally,
+ },
+ None => GccCiMode::default(),
+ };
+ }
+
if let Some(t) = toml.target {
for (triple, cfg) in t {
let mut target = Target::from_triple(&triple);
@@ -2985,6 +3018,9 @@ impl Config {
// these changes to speed up the build process for library developers. This provides consistent
// functionality for library developers between `download-rustc=true` and `download-rustc="if-unchanged"`
// options.
+ //
+ // If you update "library" logic here, update `builder::tests::ci_rustc_if_unchanged_logic` test
+ // logic accordingly.
if !CiEnv::is_ci() {
allowed_paths.push(":!library");
}

@@ -826,6 +826,34 @@ download-rustc = false
        let llvm_root = self.ci_llvm_root();
        self.unpack(&tarball, &llvm_root, "rust-dev");
    }
+
+    pub fn download_ci_gcc(&self, gcc_sha: &str, root_dir: &Path) {
+        let cache_prefix = format!("gcc-{gcc_sha}");
+        let cache_dst =
+            self.bootstrap_cache_path.as_ref().cloned().unwrap_or_else(|| self.out.join("cache"));
+
+        let gcc_cache = cache_dst.join(cache_prefix);
+        if !gcc_cache.exists() {
+            t!(fs::create_dir_all(&gcc_cache));
+        }
+        let base = &self.stage0_metadata.config.artifacts_server;
+        let filename = format!("gcc-nightly-{}.tar.xz", self.build.triple);
+        let tarball = gcc_cache.join(&filename);
+        if !tarball.exists() {
+            let help_on_error = "ERROR: failed to download gcc from ci
+
+    HELP: There could be two reasons behind this:
+        1) The host triple is not supported for `download-ci-gcc`.
+        2) Old builds get deleted after a certain time.
+    HELP: In either case, disable `download-ci-gcc` in your config.toml:
+
+    [gcc]
+    download-ci-gcc = false
+    ";
+            self.download_file(&format!("{base}/{gcc_sha}/{filename}"), &tarball, help_on_error);
+        }
+        self.unpack(&tarball, root_dir, "gcc");
+    }
}

fn path_is_dylib(path: &Path) -> bool {

@@ -370,4 +370,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
        severity: ChangeSeverity::Info,
        summary: "The rust.description option has moved to build.description and rust.description is now deprecated.",
    },
+    ChangeInfo {
+        change_id: 138051,
+        severity: ChangeSeverity::Info,
+        summary: "There is now a new `gcc` config section that can be used to download GCC from CI using `gcc.download-ci-gcc = true`",
+    },
];

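For orientation, a minimal sketch of how the new section would be enabled in a bootstrap config.toml, using only the option introduced in this diff (the comment is illustrative and not part of any shipped defaults):

    [gcc]
    # Download GCC from CI instead of building it from source (change 138051).
    download-ci-gcc = true

Leaving the option unset falls back to GccCiMode::default(), as in the parsing code above; setting it to false keeps the local build path.
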
@@ -198,8 +198,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                }
                AllocKind::Dead => unreachable!(),
            };
-            // Ensure this pointer's provenance is exposed, so that it can be used by FFI code.
-            return interp_ok(base_ptr.expose_provenance().try_into().unwrap());
+            // We don't have to expose this pointer yet, we do that in `prepare_for_native_call`.
+            return interp_ok(base_ptr.addr().try_into().unwrap());
        }
        // We are not in native lib mode, so we control the addresses ourselves.
        if let Some((reuse_addr, clock)) = global_state.reuse.take_addr(

@@ -266,7 +266,7 @@ fn imm_to_carg<'tcx>(v: &ImmTy<'tcx>, cx: &impl HasDataLayout) -> InterpResult<'
            CArg::USize(v.to_scalar().to_target_usize(cx)?.try_into().unwrap()),
        ty::RawPtr(..) => {
            let s = v.to_scalar().to_pointer(cx)?.addr();
-            // This relies on the `expose_provenance` in `addr_from_alloc_id`.
+            // This relies on the `expose_provenance` in `prepare_for_native_call`.
            CArg::RawPtr(std::ptr::with_exposed_provenance_mut(s.bytes_usize()))
        }
        _ => throw_unsup_format!("unsupported argument type for native call: {}", v.layout.ty),

@@ -1,5 +0,0 @@
-//@ known-bug: #137865
-trait Foo {
-    type Assoc<const N: Self>;
-    fn foo() -> Self::Assoc<3>;
-}

@@ -1,8 +0,0 @@
-//@ known-bug: #138048
-struct Foo;
-
-impl<'b> Foo {
-    fn bar<const V: u8>() {
-        let V;
-    }
-}

@@ -12,41 +12,40 @@ use std::autodiff::autodiff;
// We can't use Duplicated on scalars
#[autodiff(df1, Reverse, Duplicated)]
pub fn f1(x: f64) {
    //~^ ERROR Duplicated can not be used for this type
    unimplemented!()
}

// Too many activities
#[autodiff(df3, Reverse, Duplicated, Const)]
pub fn f3(x: f64) {
    //~^^ ERROR expected 1 activities, but found 2
    unimplemented!()
}

// To few activities
#[autodiff(df4, Reverse)]
pub fn f4(x: f64) {
    //~^^ ERROR expected 1 activities, but found 0
    unimplemented!()
}

// We can't use Dual in Reverse mode
#[autodiff(df5, Reverse, Dual)]
pub fn f5(x: f64) {
    //~^^ ERROR Dual can not be used in Reverse Mode
    unimplemented!()
}

// We can't use Duplicated in Forward mode
#[autodiff(df6, Forward, Duplicated)]
pub fn f6(x: f64) {
    //~^^ ERROR Duplicated can not be used in Forward Mode
    //~^^ ERROR Duplicated can not be used for this type
    unimplemented!()
}

fn dummy() {

    #[autodiff(df7, Forward, Dual)]
    let mut x = 5;
    //~^ ERROR autodiff must be applied to function

@@ -64,21 +63,21 @@ fn dummy() {
// Malformed, where args?
#[autodiff]
pub fn f7(x: f64) {
    //~^ ERROR autodiff must be applied to function
    unimplemented!()
}

// Malformed, where args?
#[autodiff()]
pub fn f8(x: f64) {
    //~^ ERROR autodiff requires at least a name and mode
    unimplemented!()
}

// Invalid attribute syntax
#[autodiff = ""]
pub fn f9(x: f64) {
    //~^ ERROR autodiff must be applied to function
    unimplemented!()
}

@@ -87,21 +86,21 @@ fn fn_exists() {}
// We colide with an already existing function
#[autodiff(fn_exists, Reverse, Active)]
pub fn f10(x: f64) {
    //~^^ ERROR the name `fn_exists` is defined multiple times [E0428]
    unimplemented!()
}

// Malformed, missing a mode
#[autodiff(df11)]
pub fn f11() {
    //~^ ERROR autodiff requires at least a name and mode
    unimplemented!()
}

// Invalid Mode
#[autodiff(df12, Debug)]
pub fn f12() {
    //~^^ ERROR unknown Mode: `Debug`. Use `Forward` or `Reverse`
    unimplemented!()
}

@@ -109,7 +108,7 @@ pub fn f12() {
// or use two autodiff macros.
#[autodiff(df13, Forward, Reverse)]
pub fn f13() {
    //~^^ ERROR did not recognize Activity: `Reverse`
    unimplemented!()
}

@@ -130,7 +129,7 @@ type MyFloat = f32;
// like THIR which has type information available.
#[autodiff(df15, Reverse, Active, Active)]
fn f15(x: MyFloat) -> f32 {
    //~^^ ERROR failed to resolve: use of undeclared type `MyFloat` [E0433]
    unimplemented!()
}

@@ -141,7 +140,9 @@ fn f16(x: f32) -> MyFloat {
}

#[repr(transparent)]
-struct F64Trans { inner: f64 }
+struct F64Trans {
+    inner: f64,
+}

// We would like to support `#[repr(transparent)]` f32/f64 wrapper in return type in the future
#[autodiff(df17, Reverse, Active, Active)]

@@ -156,5 +157,24 @@ fn f18(x: F64Trans) -> f64 {
    unimplemented!()
}

+// Invalid return activity
+#[autodiff(df19, Forward, Dual, Active)]
+fn f19(x: f32) -> f32 {
+    //~^^ ERROR invalid return activity Active in Forward Mode
+    unimplemented!()
+}
+
+#[autodiff(df20, Reverse, Active, Dual)]
+fn f20(x: f32) -> f32 {
+    //~^^ ERROR invalid return activity Dual in Reverse Mode
+    unimplemented!()
+}
+
+// Duplicated cannot be used as return activity
+#[autodiff(df21, Reverse, Active, Duplicated)]
+fn f21(x: f32) -> f32 {
+    //~^^ ERROR invalid return activity Duplicated in Reverse Mode
+    unimplemented!()
+}
+
fn main() {}

@@ -1,5 +1,5 @@
error[E0658]: attributes on expressions are experimental
-  --> $DIR/autodiff_illegal.rs:54:5
+  --> $DIR/autodiff_illegal.rs:53:5
   |
LL | #[autodiff(df7, Forward, Dual)]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -53,25 +53,25 @@ LL | pub fn f6(x: f64) {
   | ^^^

error: autodiff must be applied to function
-  --> $DIR/autodiff_illegal.rs:51:5
+  --> $DIR/autodiff_illegal.rs:50:5
   |
LL | let mut x = 5;
   | ^^^^^^^^^^^^^^

error: autodiff must be applied to function
-  --> $DIR/autodiff_illegal.rs:55:5
+  --> $DIR/autodiff_illegal.rs:54:5
   |
LL | x = x + 3;
   | ^

error: autodiff must be applied to function
-  --> $DIR/autodiff_illegal.rs:60:5
+  --> $DIR/autodiff_illegal.rs:59:5
   |
LL | let add_one_v2 = |x: u32| -> u32 { x + 1 };
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

error: autodiff must be applied to function
-  --> $DIR/autodiff_illegal.rs:66:1
+  --> $DIR/autodiff_illegal.rs:65:1
   |
LL | / pub fn f7(x: f64) {
LL | |

@@ -80,7 +80,7 @@ LL | | }
   | |_^

error: autodiff requires at least a name and mode
-  --> $DIR/autodiff_illegal.rs:73:1
+  --> $DIR/autodiff_illegal.rs:72:1
   |
LL | / pub fn f8(x: f64) {
LL | |

@@ -89,7 +89,7 @@ LL | | }
   | |_^

error: autodiff must be applied to function
-  --> $DIR/autodiff_illegal.rs:80:1
+  --> $DIR/autodiff_illegal.rs:79:1
   |
LL | / pub fn f9(x: f64) {
LL | |

@@ -98,7 +98,7 @@ LL | | }
   | |_^

error[E0428]: the name `fn_exists` is defined multiple times
-  --> $DIR/autodiff_illegal.rs:88:1
+  --> $DIR/autodiff_illegal.rs:87:1
   |
LL | fn fn_exists() {}
   | -------------- previous definition of the value `fn_exists` here

@@ -110,7 +110,7 @@ LL | #[autodiff(fn_exists, Reverse, Active)]
   = note: this error originates in the attribute macro `autodiff` (in Nightly builds, run with -Z macro-backtrace for more info)

error: autodiff requires at least a name and mode
-  --> $DIR/autodiff_illegal.rs:96:1
+  --> $DIR/autodiff_illegal.rs:95:1
   |
LL | / pub fn f11() {
LL | |

@@ -119,19 +119,43 @@ LL | | }
   | |_^

error: unknown Mode: `Debug`. Use `Forward` or `Reverse`
-  --> $DIR/autodiff_illegal.rs:102:18
+  --> $DIR/autodiff_illegal.rs:101:18
   |
LL | #[autodiff(df12, Debug)]
   | ^^^^^

error: did not recognize Activity: `Reverse`
-  --> $DIR/autodiff_illegal.rs:110:27
+  --> $DIR/autodiff_illegal.rs:109:27
   |
LL | #[autodiff(df13, Forward, Reverse)]
   | ^^^^^^^

+error: invalid return activity Active in Forward Mode
+  --> $DIR/autodiff_illegal.rs:161:1
+   |
+LL | #[autodiff(df19, Forward, Dual, Active)]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: this error originates in the attribute macro `autodiff` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: invalid return activity Dual in Reverse Mode
+  --> $DIR/autodiff_illegal.rs:167:1
+   |
+LL | #[autodiff(df20, Reverse, Active, Dual)]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: this error originates in the attribute macro `autodiff` (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: invalid return activity Duplicated in Reverse Mode
+  --> $DIR/autodiff_illegal.rs:174:1
+   |
+LL | #[autodiff(df21, Reverse, Active, Duplicated)]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: this error originates in the attribute macro `autodiff` (in Nightly builds, run with -Z macro-backtrace for more info)
+
error[E0433]: failed to resolve: use of undeclared type `MyFloat`
-  --> $DIR/autodiff_illegal.rs:131:1
+  --> $DIR/autodiff_illegal.rs:130:1
   |
LL | #[autodiff(df15, Reverse, Active, Active)]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of undeclared type `MyFloat`

@@ -139,14 +163,14 @@ LL | #[autodiff(df15, Reverse, Active, Active)]
   = note: this error originates in the attribute macro `autodiff` (in Nightly builds, run with -Z macro-backtrace for more info)

error[E0433]: failed to resolve: use of undeclared type `F64Trans`
-  --> $DIR/autodiff_illegal.rs:153:1
+  --> $DIR/autodiff_illegal.rs:154:1
   |
LL | #[autodiff(df18, Reverse, Active, Active)]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of undeclared type `F64Trans`
   |
   = note: this error originates in the attribute macro `autodiff` (in Nightly builds, run with -Z macro-backtrace for more info)

-error: aborting due to 19 previous errors
+error: aborting due to 22 previous errors

Some errors have detailed explanations: E0428, E0433, E0658.
For more information about an error, try `rustc --explain E0428`.

@@ -0,0 +1,10 @@
+struct Foo<'a>(&'a ());
+
+// We need a lifetime in scope or else we do not write a user type annotation as a fast-path.
+impl<'a> Foo<'a> {
+    fn bar<const V: u8>() {
+        let V;
+        //~^ ERROR constant parameters cannot be referenced in patterns
+    }
+}
+fn main() {}

@@ -0,0 +1,11 @@
+error[E0158]: constant parameters cannot be referenced in patterns
+  --> $DIR/bad-param-in-pat.rs:6:13
+   |
+LL | fn bar<const V: u8>() {
+   |        ----------- constant defined here
+LL | let V;
+   |     ^ can't be used in patterns
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0158`.

@@ -0,0 +1,25 @@
+warning: the feature `generic_const_parameter_types` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/references-parent-generics.rs:3:27
+   |
+LL | #![cfg_attr(feat, feature(generic_const_parameter_types))]
+   |                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #137626 <https://github.com/rust-lang/rust/issues/137626> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+error: `Self` is forbidden as the type of a const generic parameter
+  --> $DIR/references-parent-generics.rs:7:25
+   |
+LL | type Assoc<const N: Self>;
+   |                     ^^^^
+   |
+   = note: the only supported types are integers, `bool`, and `char`
+
+error: anonymous constants referencing generics are not yet supported
+  --> $DIR/references-parent-generics.rs:14:21
+   |
+LL | let x: T::Assoc<3>;
+   |                 ^
+
+error: aborting due to 2 previous errors; 1 warning emitted
+

@@ -0,0 +1,16 @@
+error: `Self` is forbidden as the type of a const generic parameter
+  --> $DIR/references-parent-generics.rs:7:25
+   |
+LL | type Assoc<const N: Self>;
+   |                     ^^^^
+   |
+   = note: the only supported types are integers, `bool`, and `char`
+
+error: anonymous constants referencing generics are not yet supported
+  --> $DIR/references-parent-generics.rs:14:21
+   |
+LL | let x: T::Assoc<3>;
+   |                 ^
+
+error: aborting due to 2 previous errors
+

@@ -0,0 +1,18 @@
+//@ revisions: feat nofeat
+
+#![cfg_attr(feat, feature(generic_const_parameter_types))]
+//[feat]~^ WARN the feature `generic_const_parameter_types` is incomplete
+
+trait Foo {
+    type Assoc<const N: Self>;
+    //~^ ERROR `Self` is forbidden as the type of a const generic parameter
+}
+
+fn foo<T: Foo>() {
+    // We used to end up feeding the type of this anon const to be `T`, but the anon const
+    // doesn't inherit the generics of `foo`, which led to index oob errors.
+    let x: T::Assoc<3>;
+    //~^ ERROR anonymous constants referencing generics are not yet supported
+}
+
+fn main() {}

tests/ui/type/pattern_types/literals.rs (new file, 136 lines)
@@ -0,0 +1,136 @@
+//! Check where literals can be used to initialize pattern types and where not.
+
+#![feature(pattern_types, const_trait_impl, pattern_type_range_trait)]
+#![feature(pattern_type_macro)]
+
+use std::pat::pattern_type;
+
+fn out_of_range() -> pattern_type!(u32 is 1..) {
+    0
+    //~^ mismatched types
+}
+
+fn at_range_start() -> pattern_type!(u32 is 1..) {
+    1
+}
+
+fn in_range() -> pattern_type!(u32 is 1..) {
+    2
+}
+
+fn negative_lit_on_unsigned_ty() -> pattern_type!(u32 is 1..) {
+    -3
+    //~^ ERROR: cannot apply unary operator `-` to type `(u32) is 1..`
+}
+
+fn negative_lit_in_range() -> pattern_type!(i8 is -5..5) {
+    -2
+    //~^ ERROR: cannot apply unary operator `-` to type `(i8) is -5..=4`
+}
+
+fn positive_lit_in_range_of_signed() -> pattern_type!(i8 is -5..5) {
+    2
+}
+
+fn negative_lit_at_range_start() -> pattern_type!(i8 is -5..5) {
+    -5
+    //~^ mismatched types
+}
+
+fn positive_lit_at_range_end() -> pattern_type!(i8 is -5..5) {
+    4
+}
+
+fn lit_one_beyond_range_end() -> pattern_type!(i8 is -5..5) {
+    5
+    //~^ mismatched types
+}
+
+fn wrong_lit_kind() -> pattern_type!(u32 is 1..) {
+    '3'
+    //~^ mismatched types
+}
+
+fn char_lit_in_range() -> pattern_type!(char is 'a'..'z') {
+    'b'
+    //~^ mismatched types
+}
+
+fn char_lit_out_of_range() -> pattern_type!(char is 'a'..'z') {
+    'A'
+    //~^ mismatched types
+}
+
+fn lit_at_unsigned_range_inclusive_end() -> pattern_type!(u32 is 0..=1) {
+    1
+}
+
+fn single_element_range() -> pattern_type!(u32 is 0..=0) {
+    0
+}
+
+fn lit_oob_single_element_range() -> pattern_type!(u32 is 0..=0) {
+    1
+    //~^ mismatched types
+}
+
+fn lit_oob_single_element_range_exclusive() -> pattern_type!(u32 is 0..1) {
+    1
+    //~^ mismatched types
+}
+
+fn single_element_range_exclusive() -> pattern_type!(u32 is 0..1) {
+    0
+}
+
+fn empty_range_at_base_type_min() -> pattern_type!(u32 is 0..0) {
+    //~^ evaluation of constant value failed
+    0
+}
+
+fn empty_range_at_base_type_min2() -> pattern_type!(u32 is 0..0) {
+    //~^ evaluation of constant value failed
+    1
+}
+
+fn empty_range() -> pattern_type!(u32 is 1..1) {
+    0
+    //~^ mismatched types
+}
+
+fn empty_range2() -> pattern_type!(u32 is 1..1) {
+    1
+    //~^ mismatched types
+}
+
+fn wraparound_range_at_base_ty_end() -> pattern_type!(u32 is 1..0) {
+    //~^ evaluation of constant value failed
+    1
+}
+
+fn wraparound_range_at_base_ty_end2() -> pattern_type!(u32 is 1..0) {
+    //~^ evaluation of constant value failed
+    0
+}
+
+fn wraparound_range_at_base_ty_end3() -> pattern_type!(u32 is 1..0) {
+    //~^ evaluation of constant value failed
+    2
+}
+
+fn wraparound_range() -> pattern_type!(u32 is 2..1) {
+    1
+    //~^ mismatched types
+}
+
+fn lit_in_wraparound_range() -> pattern_type!(u32 is 2..1) {
+    0
+    //~^ mismatched types
+}
+
+fn lit_at_wraparound_range_start() -> pattern_type!(u32 is 2..1) {
+    2
+    //~^ mismatched types
+}
+
+fn main() {}

tests/ui/type/pattern_types/literals.stderr (new file, 193 lines)
@@ -0,0 +1,193 @@
+error[E0080]: evaluation of constant value failed
+  --> $DIR/literals.rs:86:62
+   |
+LL | fn empty_range_at_base_type_min() -> pattern_type!(u32 is 0..0) {
+   | ^ evaluation panicked: exclusive range end at minimum value of type
+
+error[E0080]: evaluation of constant value failed
+  --> $DIR/literals.rs:91:63
+   |
+LL | fn empty_range_at_base_type_min2() -> pattern_type!(u32 is 0..0) {
+   | ^ evaluation panicked: exclusive range end at minimum value of type
+
+error[E0080]: evaluation of constant value failed
+  --> $DIR/literals.rs:106:65
+   |
+LL | fn wraparound_range_at_base_ty_end() -> pattern_type!(u32 is 1..0) {
+   | ^ evaluation panicked: exclusive range end at minimum value of type
+
+error[E0080]: evaluation of constant value failed
+  --> $DIR/literals.rs:111:66
+   |
+LL | fn wraparound_range_at_base_ty_end2() -> pattern_type!(u32 is 1..0) {
+   | ^ evaluation panicked: exclusive range end at minimum value of type
+
+error[E0080]: evaluation of constant value failed
+  --> $DIR/literals.rs:116:66
+   |
+LL | fn wraparound_range_at_base_ty_end3() -> pattern_type!(u32 is 1..0) {
+   | ^ evaluation panicked: exclusive range end at minimum value of type
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:9:5
+   |
+LL | fn out_of_range() -> pattern_type!(u32 is 1..) {
+   | ------------------------- expected `(u32) is 1..` because of return type
+LL | 0
+   | ^ expected `(u32) is 1..`, found integer
+   |
+   = note: expected pattern type `(u32) is 1..`
+              found type `{integer}`
+
+error[E0600]: cannot apply unary operator `-` to type `(u32) is 1..`
+  --> $DIR/literals.rs:22:5
+   |
+LL | -3
+   | ^^ cannot apply unary operator `-`
+
+error[E0600]: cannot apply unary operator `-` to type `(i8) is -5..=4`
+  --> $DIR/literals.rs:27:5
+   |
+LL | -2
+   | ^^ cannot apply unary operator `-`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:36:5
+   |
+LL | fn negative_lit_at_range_start() -> pattern_type!(i8 is -5..5) {
+   | -------------------------- expected `(i8) is -5..=4` because of return type
+LL | -5
+   | ^^ expected `(i8) is -5..=4`, found integer
+   |
+   = note: expected pattern type `(i8) is -5..=4`
+              found type `{integer}`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:45:5
+   |
+LL | fn lit_one_beyond_range_end() -> pattern_type!(i8 is -5..5) {
+   | -------------------------- expected `(i8) is -5..=4` because of return type
+LL | 5
+   | ^ expected `(i8) is -5..=4`, found integer
+   |
+   = note: expected pattern type `(i8) is -5..=4`
+              found type `{integer}`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:50:5
+   |
+LL | fn wrong_lit_kind() -> pattern_type!(u32 is 1..) {
+   | ------------------------- expected `(u32) is 1..` because of return type
+LL | '3'
+   | ^^^ expected `(u32) is 1..`, found `char`
+   |
+   = note: expected pattern type `(u32) is 1..`
+              found type `char`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:55:5
+   |
+LL | fn char_lit_in_range() -> pattern_type!(char is 'a'..'z') {
+   | ------------------------------- expected `(char) is 'a'..='y'` because of return type
+LL | 'b'
+   | ^^^ expected `(char) is 'a'..='y'`, found `char`
+   |
+   = note: expected pattern type `(char) is 'a'..='y'`
+              found type `char`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:60:5
+   |
+LL | fn char_lit_out_of_range() -> pattern_type!(char is 'a'..'z') {
+   | ------------------------------- expected `(char) is 'a'..='y'` because of return type
+LL | 'A'
+   | ^^^ expected `(char) is 'a'..='y'`, found `char`
+   |
+   = note: expected pattern type `(char) is 'a'..='y'`
+              found type `char`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:73:5
+   |
+LL | fn lit_oob_single_element_range() -> pattern_type!(u32 is 0..=0) {
+   | --------------------------- expected `(u32) is 0..=0` because of return type
+LL | 1
+   | ^ expected `(u32) is 0..=0`, found integer
+   |
+   = note: expected pattern type `(u32) is 0..=0`
+              found type `{integer}`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:78:5
+   |
+LL | fn lit_oob_single_element_range_exclusive() -> pattern_type!(u32 is 0..1) {
+   | -------------------------- expected `(u32) is 0..=0` because of return type
+LL | 1
+   | ^ expected `(u32) is 0..=0`, found integer
+   |
+   = note: expected pattern type `(u32) is 0..=0`
+              found type `{integer}`
+
+error: pattern type ranges cannot wrap: 1..=0
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:97:5
+   |
+LL | fn empty_range() -> pattern_type!(u32 is 1..1) {
+   | -------------------------- expected `(u32) is 1..=0` because of return type
+LL | 0
+   | ^ expected `(u32) is 1..=0`, found integer
+   |
+   = note: expected pattern type `(u32) is 1..=0`
+              found type `{integer}`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:102:5
+   |
+LL | fn empty_range2() -> pattern_type!(u32 is 1..1) {
+   | -------------------------- expected `(u32) is 1..=0` because of return type
+LL | 1
+   | ^ expected `(u32) is 1..=0`, found integer
+   |
+   = note: expected pattern type `(u32) is 1..=0`
+              found type `{integer}`
+
+error: pattern type ranges cannot wrap: 2..=0
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:122:5
+   |
+LL | fn wraparound_range() -> pattern_type!(u32 is 2..1) {
+   | -------------------------- expected `(u32) is 2..=0` because of return type
+LL | 1
+   | ^ expected `(u32) is 2..=0`, found integer
+   |
+   = note: expected pattern type `(u32) is 2..=0`
+              found type `{integer}`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:127:5
+   |
+LL | fn lit_in_wraparound_range() -> pattern_type!(u32 is 2..1) {
+   | -------------------------- expected `(u32) is 2..=0` because of return type
+LL | 0
+   | ^ expected `(u32) is 2..=0`, found integer
+   |
+   = note: expected pattern type `(u32) is 2..=0`
+              found type `{integer}`
+
+error[E0308]: mismatched types
+  --> $DIR/literals.rs:132:5
+   |
+LL | fn lit_at_wraparound_range_start() -> pattern_type!(u32 is 2..1) {
+   | -------------------------- expected `(u32) is 2..=0` because of return type
+LL | 2
+   | ^ expected `(u32) is 2..=0`, found integer
+   |
+   = note: expected pattern type `(u32) is 2..=0`
+              found type `{integer}`
+
+error: aborting due to 22 previous errors
+
+Some errors have detailed explanations: E0080, E0308, E0600.
+For more information about an error, try `rustc --explain E0080`.

@@ -1,6 +1,6 @@
//! Check that pattern types can only have specific base types

-#![feature(pattern_types)]
+#![feature(pattern_types, const_trait_impl, pattern_type_range_trait)]
#![feature(pattern_type_macro)]

use std::pat::pattern_type;

@@ -14,7 +14,7 @@ const BAD_NESTING: pattern_type!(pattern_type!(u32 is 1..) is 0..) = todo!();
// We want to get the most narrowest version that a pattern could be
const BAD_NESTING2: pattern_type!(pattern_type!(i32 is 1..) is ..=-1) = todo!();
//~^ ERROR: not a valid base type for range patterns
-//~| ERROR: mismatched types
+//~| ERROR: cannot apply unary operator `-` to type `(i32) is 1..`

const BAD_NESTING3: pattern_type!(pattern_type!(i32 is 1..) is ..0) = todo!();
//~^ ERROR: not a valid base type for range patterns

@@ -43,14 +43,11 @@ LL | const BAD_NESTING2: pattern_type!(pattern_type!(i32 is 1..) is ..=-1) = tod
            u128
    and 5 others

-error[E0308]: mismatched types
+error[E0600]: cannot apply unary operator `-` to type `(i32) is 1..`
  --> $DIR/nested.rs:15:67
   |
LL | const BAD_NESTING2: pattern_type!(pattern_type!(i32 is 1..) is ..=-1) = todo!();
-   | ^^ expected `(i32) is 1..`, found integer
-   |
-   = note: expected pattern type `(i32) is 1..`
-              found type `{integer}`
+   | ^^ cannot apply unary operator `-`

error[E0277]: `(i32) is 1..` is not a valid base type for range patterns
  --> $DIR/nested.rs:19:35

@@ -180,5 +177,5 @@ LL | const BAD_NESTING5: pattern_type!(f32 is 1.0 .. 2.0) = todo!();

error: aborting due to 11 previous errors

-Some errors have detailed explanations: E0277, E0308.
+Some errors have detailed explanations: E0277, E0308, E0600.
For more information about an error, try `rustc --explain E0277`.

@@ -1,4 +1,4 @@
-#![feature(pattern_types, rustc_attrs)]
+#![feature(pattern_types, rustc_attrs, const_trait_impl, pattern_type_range_trait)]
#![feature(pattern_type_macro)]
#![allow(incomplete_features)]

@@ -18,6 +18,25 @@ type A = Option<std::num::NonZeroU32>; //~ ERROR layout_of
#[rustc_layout(debug)]
struct NonZeroU32New(pattern_type!(u32 is 1..)); //~ ERROR layout_of

+#[rustc_layout(debug)]
+type EMPTY = pattern_type!(u32 is 1..1); //~ ERROR unknown layout
+
+#[rustc_layout(debug)]
+type WRAP = pattern_type!(u32 is 1..0); //~ ERROR unknown layout
+//~^ ERROR: evaluation of constant value failed
+
+#[rustc_layout(debug)]
+type WRAP2 = pattern_type!(u32 is 5..2); //~ ERROR unknown layout
+
+#[rustc_layout(debug)]
+type SIGN = pattern_type!(i8 is -10..=10); //~ ERROR layout_of
+
+#[rustc_layout(debug)]
+type MIN = pattern_type!(i8 is -128..=0); //~ ERROR layout_of
+
+#[rustc_layout(debug)]
+type SignedWrap = pattern_type!(i8 is 120..=-120); //~ ERROR unknown layout
+
fn main() {
    let x: pattern_type!(u32 is 1..) = unsafe { std::mem::transmute(42_u32) };
}

@@ -357,5 +357,120 @@ error: layout_of(NonZeroU32New) = Layout {
LL | struct NonZeroU32New(pattern_type!(u32 is 1..));
   | ^^^^^^^^^^^^^^^^^^^^

-error: aborting due to 5 previous errors
+error: pattern type ranges cannot wrap: 1..=0
+
+error: the type has an unknown layout
+  --> $DIR/range_patterns.rs:22:1
+   |
+LL | type EMPTY = pattern_type!(u32 is 1..1);
+   | ^^^^^^^^^^
+
+error[E0080]: evaluation of constant value failed
+  --> $DIR/range_patterns.rs:25:37
+   |
+LL | type WRAP = pattern_type!(u32 is 1..0);
+   | ^ evaluation panicked: exclusive range end at minimum value of type
+
+error: the type has an unknown layout
+  --> $DIR/range_patterns.rs:25:1
+   |
+LL | type WRAP = pattern_type!(u32 is 1..0);
+   | ^^^^^^^^^
+
+error: pattern type ranges cannot wrap: 5..=1
+
+error: the type has an unknown layout
+  --> $DIR/range_patterns.rs:29:1
+   |
+LL | type WRAP2 = pattern_type!(u32 is 5..2);
+   | ^^^^^^^^^^
+
+error: layout_of((i8) is -10..=10) = Layout {
+    size: Size(1 bytes),
+    align: AbiAndPrefAlign {
+        abi: Align(1 bytes),
+        pref: $SOME_ALIGN,
+    },
+    backend_repr: Scalar(
+        Initialized {
+            value: Int(
+                I8,
+                true,
+            ),
+            valid_range: (..=10) | (246..),
+        },
+    ),
+    fields: Primitive,
+    largest_niche: Some(
+        Niche {
+            offset: Size(0 bytes),
+            value: Int(
+                I8,
+                true,
+            ),
+            valid_range: (..=10) | (246..),
+        },
+    ),
+    uninhabited: false,
+    variants: Single {
+        index: 0,
+    },
+    max_repr_align: None,
+    unadjusted_abi_align: Align(1 bytes),
+    randomization_seed: $SEED,
+}
+  --> $DIR/range_patterns.rs:32:1
+   |
+LL | type SIGN = pattern_type!(i8 is -10..=10);
+   | ^^^^^^^^^
+
+error: layout_of((i8) is i8::MIN..=0) = Layout {
+    size: Size(1 bytes),
+    align: AbiAndPrefAlign {
+        abi: Align(1 bytes),
+        pref: $SOME_ALIGN,
+    },
+    backend_repr: Scalar(
+        Initialized {
+            value: Int(
+                I8,
+                true,
+            ),
+            valid_range: (..=0) | (128..),
+        },
+    ),
+    fields: Primitive,
+    largest_niche: Some(
+        Niche {
+            offset: Size(0 bytes),
+            value: Int(
+                I8,
+                true,
+            ),
+            valid_range: (..=0) | (128..),
+        },
+    ),
+    uninhabited: false,
+    variants: Single {
+        index: 0,
+    },
+    max_repr_align: None,
+    unadjusted_abi_align: Align(1 bytes),
+    randomization_seed: $SEED,
+}
+  --> $DIR/range_patterns.rs:35:1
+   |
+LL | type MIN = pattern_type!(i8 is -128..=0);
+   | ^^^^^^^^
+
+error: pattern type ranges cannot wrap: 120..=-120
+
+error: the type has an unknown layout
+  --> $DIR/range_patterns.rs:38:1
+   |
+LL | type SignedWrap = pattern_type!(i8 is 120..=-120);
+   | ^^^^^^^^^^^^^^^
+
+error: aborting due to 15 previous errors
+
+For more information about this error, try `rustc --explain E0080`.

@@ -958,6 +958,9 @@ If appropriate, please update `CONFIG_CHANGE_HISTORY` in `src/bootstrap/src/util
[mentions."src/bootstrap/src/core/build_steps/llvm.rs"]
message = "This PR changes how LLVM is built. Consider updating src/bootstrap/download-ci-llvm-stamp."

+[mentions."src/bootstrap/src/core/build_steps/gcc.rs"]
+message = "This PR changes how GCC is built. Consider updating src/bootstrap/download-ci-gcc-stamp."
+
[mentions."tests/crashes"]
message = "This PR changes a file inside `tests/crashes`. If a crash was fixed, please move into the corresponding `ui` subdir and add 'Fixes #<issueNr>' to the PR description to autoclose the issue upon merge."