Auto merge of #136332 - jhpratt:rollup-aa69d0e, r=jhpratt
Rollup of 9 pull requests

Successful merges:

 - #132156 (When encountering unexpected closure return type, point at return type/expression)
 - #133429 (Autodiff Upstreaming - rustc_codegen_ssa, rustc_middle)
 - #136281 (`rustc_hir_analysis` cleanups)
 - #136297 (Fix a typo in profile-guided-optimization.md)
 - #136300 (atomic: extend compare_and_swap migration docs)
 - #136310 (normalize `*.long-type.txt` paths for compare-mode tests)
 - #136312 (Disable `overflow_delimited_expr` in edition 2024)
 - #136313 (Filter out RPITITs when suggesting unconstrained assoc type on too many generics)
 - #136323 (Fix a typo in conventions.md)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 7f36543a48

95 changed files with 1077 additions and 563 deletions
@@ -4234,6 +4234,7 @@ name = "rustc_monomorphize"
version = "0.0.0"
dependencies = [
 "rustc_abi",
 "rustc_ast",
 "rustc_attr_parsing",
 "rustc_data_structures",
 "rustc_errors",
@@ -4243,6 +4244,7 @@ dependencies = [
 "rustc_middle",
 "rustc_session",
 "rustc_span",
 "rustc_symbol_mangling",
 "rustc_target",
 "serde",
 "serde_json",
|
||||
|
|
|
@@ -79,6 +79,7 @@ pub struct AutoDiffItem {
pub target: String,
pub attrs: AutoDiffAttrs,
}

#[derive(Clone, Eq, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct AutoDiffAttrs {
/// Conceptually either forward or reverse mode AD, as described in various autodiff papers and
@@ -231,7 +232,7 @@ impl AutoDiffAttrs {
self.ret_activity == DiffActivity::ActiveOnly
}

pub fn error() -> Self {
pub const fn error() -> Self {
AutoDiffAttrs {
mode: DiffMode::Error,
ret_activity: DiffActivity::None,
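A standalone sketch (local stand-in types, not the compiler's) of why the `pub fn error()` to `pub const fn error()` change above is valid: the constructor only builds plain enum and struct values, which is allowed in a `const fn`, so callers can now also use it in const contexts.

    #[allow(dead_code)]
    #[derive(Debug, PartialEq)]
    enum DiffMode { Error, Source, Forward, Reverse }

    struct Attrs { mode: DiffMode }

    impl Attrs {
        // Mirrors the shape of `AutoDiffAttrs::error()` above.
        const fn error() -> Self {
            Attrs { mode: DiffMode::Error }
        }
    }

    const ERR: Attrs = Attrs::error();

    fn main() {
        assert_eq!(ERR.mode, DiffMode::Error);
    }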
|
||||
|
|
|
@@ -62,8 +62,8 @@ fn generate_enzyme_call<'ll>(
// add outer_fn name to ad_name to make it unique, in case users apply autodiff to multiple
// functions. Unwrap will only panic, if LLVM gave us an invalid string.
let name = llvm::get_value_name(outer_fn);
let outer_fn_name = std::ffi::CStr::from_bytes_with_nul(name).unwrap().to_str().unwrap();
ad_name.push_str(outer_fn_name.to_string().as_str());
let outer_fn_name = std::str::from_utf8(name).unwrap();
ad_name.push_str(outer_fn_name);

// Let us assume the user wrote the following function square:
//
@@ -255,14 +255,14 @@ fn generate_enzyme_call<'ll>(
// have no debug info to copy, which would then be ok.
trace!("no dbg info");
}
// Now that we copied the metadata, get rid of dummy code.
llvm::LLVMRustEraseInstBefore(entry, last_inst);
llvm::LLVMRustEraseInstFromParent(last_inst);

if cx.val_ty(outer_fn) != cx.type_void() {
builder.ret(call);
} else {
// Now that we copied the metadata, get rid of dummy code.
llvm::LLVMRustEraseInstUntilInclusive(entry, last_inst);

if cx.val_ty(call) == cx.type_void() {
builder.ret_void();
} else {
builder.ret(call);
}

// Let's crash in case that we messed something up above and generated invalid IR.
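Not part of the diff: a minimal illustration of the swap above from `CStr::from_bytes_with_nul` to `std::str::from_utf8`. The new code treats the byte slice returned for the value name as UTF-8 directly, with no trailing-NUL requirement and without the intermediate `to_string()` allocation. The input bytes and the name prefix here are placeholders, not values taken from the compiler.

    // Stands in for the bytes returned by `llvm::get_value_name(outer_fn)`.
    fn name_from_bytes(bytes: &[u8]) -> &str {
        // Panics only if LLVM handed us a non-UTF-8 symbol name.
        std::str::from_utf8(bytes).unwrap()
    }

    fn main() {
        let mut ad_name = String::from("prefix_"); // placeholder prefix
        ad_name.push_str(name_from_bytes(b"square"));
        assert_eq!(ad_name, "prefix_square");
    }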
|
||||
|
|
|
@@ -298,7 +298,7 @@ struct UsageSets<'tcx> {
/// Prepare sets of definitions that are relevant to deciding whether something
/// is an "unused function" for coverage purposes.
fn prepare_usage_sets<'tcx>(tcx: TyCtxt<'tcx>) -> UsageSets<'tcx> {
let MonoItemPartitions { all_mono_items, codegen_units } =
let MonoItemPartitions { all_mono_items, codegen_units, .. } =
tcx.collect_and_partition_mono_items(());

// Obtain a MIR body for each function participating in codegen, via an
|
||||
|
|
|
@@ -7,11 +7,13 @@ use crate::llvm::Bool;
extern "C" {
// Enzyme
pub fn LLVMRustHasMetadata(I: &Value, KindID: c_uint) -> bool;
pub fn LLVMRustEraseInstBefore(BB: &BasicBlock, I: &Value);
pub fn LLVMRustEraseInstUntilInclusive(BB: &BasicBlock, I: &Value);
pub fn LLVMRustGetLastInstruction<'a>(BB: &BasicBlock) -> Option<&'a Value>;
pub fn LLVMRustDIGetInstMetadata(I: &Value) -> Option<&Metadata>;
pub fn LLVMRustEraseInstFromParent(V: &Value);
pub fn LLVMRustGetTerminator<'a>(B: &BasicBlock) -> &'a Value;
pub fn LLVMDumpModule(M: &Module);
pub fn LLVMDumpValue(V: &Value);
pub fn LLVMRustVerifyFunction(V: &Value, action: LLVMRustVerifierFailureAction) -> Bool;

pub fn LLVMGetFunctionCallConv(F: &Value) -> c_uint;
|
||||
|
|
|
@@ -16,6 +16,8 @@ codegen_ssa_archive_build_failure = failed to build archive at `{$path}`: {$erro

codegen_ssa_atomic_compare_exchange = Atomic compare-exchange intrinsic missing failure memory ordering

codegen_ssa_autodiff_without_lto = using the autodiff feature requires using fat-lto

codegen_ssa_binary_output_to_tty = option `-o` or `--emit` is used to write binary output type `{$shorthand}` to stdout, but stdout is a tty

codegen_ssa_cgu_not_recorded =
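The new `codegen_ssa_atomic_compare_exchange` message is about the intrinsic level, but it maps onto the surface API this rollup also documents in the `compare_and_swap` migration PR: `compare_exchange` takes an explicit failure ordering in addition to the success ordering. A small, self-contained example of that API:

    use std::sync::atomic::{AtomicUsize, Ordering};

    fn main() {
        let a = AtomicUsize::new(5);
        // Success ordering first, then the failure ordering the diagnostic is about.
        let r = a.compare_exchange(5, 10, Ordering::SeqCst, Ordering::Relaxed);
        assert_eq!(r, Ok(5));
        assert_eq!(a.load(Ordering::SeqCst), 10);
    }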
|
||||
|
|
|
@ -7,6 +7,7 @@ use std::sync::mpsc::{Receiver, Sender, channel};
|
|||
use std::{fs, io, mem, str, thread};
|
||||
|
||||
use rustc_ast::attr;
|
||||
use rustc_ast::expand::autodiff_attrs::AutoDiffItem;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
|
||||
use rustc_data_structures::jobserver::{self, Acquired};
|
||||
use rustc_data_structures::memmap::Mmap;
|
||||
|
@ -40,7 +41,7 @@ use tracing::debug;
|
|||
use super::link::{self, ensure_removed};
|
||||
use super::lto::{self, SerializedModule};
|
||||
use super::symbol_export::symbol_name_for_instance_in_crate;
|
||||
use crate::errors::ErrorCreatingRemarkDir;
|
||||
use crate::errors::{AutodiffWithoutLto, ErrorCreatingRemarkDir};
|
||||
use crate::traits::*;
|
||||
use crate::{
|
||||
CachedModuleCodegen, CodegenResults, CompiledModule, CrateInfo, ModuleCodegen, ModuleKind,
|
||||
|
@ -118,6 +119,7 @@ pub struct ModuleConfig {
|
|||
pub merge_functions: bool,
|
||||
pub emit_lifetime_markers: bool,
|
||||
pub llvm_plugins: Vec<String>,
|
||||
pub autodiff: Vec<config::AutoDiff>,
|
||||
}
|
||||
|
||||
impl ModuleConfig {
|
||||
|
@ -266,6 +268,7 @@ impl ModuleConfig {
|
|||
|
||||
emit_lifetime_markers: sess.emit_lifetime_markers(),
|
||||
llvm_plugins: if_regular!(sess.opts.unstable_opts.llvm_plugins.clone(), vec![]),
|
||||
autodiff: if_regular!(sess.opts.unstable_opts.autodiff.clone(), vec![]),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -389,6 +392,7 @@ impl<B: WriteBackendMethods> CodegenContext<B> {
|
|||
|
||||
fn generate_lto_work<B: ExtraBackendMethods>(
|
||||
cgcx: &CodegenContext<B>,
|
||||
autodiff: Vec<AutoDiffItem>,
|
||||
needs_fat_lto: Vec<FatLtoInput<B>>,
|
||||
needs_thin_lto: Vec<(String, B::ThinBuffer)>,
|
||||
import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>,
|
||||
|
@@ -397,11 +401,19 @@ fn generate_lto_work<B: ExtraBackendMethods>(

if !needs_fat_lto.is_empty() {
assert!(needs_thin_lto.is_empty());
let module =
let mut module =
B::run_fat_lto(cgcx, needs_fat_lto, import_only_modules).unwrap_or_else(|e| e.raise());
if cgcx.lto == Lto::Fat {
let config = cgcx.config(ModuleKind::Regular);
module = unsafe { module.autodiff(cgcx, autodiff, config).unwrap() };
}
// We are adding a single work item, so the cost doesn't matter.
vec![(WorkItem::LTO(module), 0)]
} else {
if !autodiff.is_empty() {
let dcx = cgcx.create_dcx();
dcx.handle().emit_fatal(AutodiffWithoutLto {});
}
assert!(needs_fat_lto.is_empty());
let (lto_modules, copy_jobs) = B::run_thin_lto(cgcx, needs_thin_lto, import_only_modules)
.unwrap_or_else(|e| e.raise());
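A rough, self-contained model (local names only, not the compiler's types) of the control flow added above: autodiff items are folded into the single fat-LTO module, while requesting autodiff together with thin LTO is a fatal error, matching the `codegen_ssa_autodiff_without_lto` message.

    enum Lto { Fat, Thin }

    fn plan_lto(lto: Lto, autodiff_items: usize) -> Result<&'static str, &'static str> {
        match lto {
            Lto::Fat if autodiff_items > 0 => Ok("run fat LTO, then apply autodiff to the merged module"),
            Lto::Fat => Ok("run fat LTO"),
            Lto::Thin if autodiff_items > 0 => Err("using the autodiff feature requires using fat-lto"),
            Lto::Thin => Ok("run thin LTO"),
        }
    }

    fn main() {
        assert!(plan_lto(Lto::Thin, 2).is_err());
        assert!(plan_lto(Lto::Fat, 2).is_ok());
    }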
|
||||
|
@ -1021,6 +1033,9 @@ pub(crate) enum Message<B: WriteBackendMethods> {
|
|||
/// Sent from a backend worker thread.
|
||||
WorkItem { result: Result<WorkItemResult<B>, Option<WorkerFatalError>>, worker_id: usize },
|
||||
|
||||
/// A vector containing all the AutoDiff tasks that we have to pass to Enzyme.
|
||||
AddAutoDiffItems(Vec<AutoDiffItem>),
|
||||
|
||||
/// The frontend has finished generating something (backend IR or a
|
||||
/// post-LTO artifact) for a codegen unit, and it should be passed to the
|
||||
/// backend. Sent from the main thread.
|
||||
|
@ -1348,6 +1363,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
|
|||
|
||||
// This is where we collect codegen units that have gone all the way
|
||||
// through codegen and LLVM.
|
||||
let mut autodiff_items = Vec::new();
|
||||
let mut compiled_modules = vec![];
|
||||
let mut compiled_allocator_module = None;
|
||||
let mut needs_link = Vec::new();
|
||||
|
@ -1459,9 +1475,13 @@ fn start_executing_work<B: ExtraBackendMethods>(
|
|||
let needs_thin_lto = mem::take(&mut needs_thin_lto);
|
||||
let import_only_modules = mem::take(&mut lto_import_only_modules);
|
||||
|
||||
for (work, cost) in
|
||||
generate_lto_work(&cgcx, needs_fat_lto, needs_thin_lto, import_only_modules)
|
||||
{
|
||||
for (work, cost) in generate_lto_work(
|
||||
&cgcx,
|
||||
autodiff_items.clone(),
|
||||
needs_fat_lto,
|
||||
needs_thin_lto,
|
||||
import_only_modules,
|
||||
) {
|
||||
let insertion_index = work_items
|
||||
.binary_search_by_key(&cost, |&(_, cost)| cost)
|
||||
.unwrap_or_else(|e| e);
|
||||
|
@ -1596,6 +1616,10 @@ fn start_executing_work<B: ExtraBackendMethods>(
|
|||
main_thread_state = MainThreadState::Idle;
|
||||
}
|
||||
|
||||
Message::AddAutoDiffItems(mut items) => {
|
||||
autodiff_items.append(&mut items);
|
||||
}
|
||||
|
||||
Message::CodegenComplete => {
|
||||
if codegen_state != Aborted {
|
||||
codegen_state = Completed;
|
||||
|
@ -2070,6 +2094,10 @@ impl<B: ExtraBackendMethods> OngoingCodegen<B> {
|
|||
drop(self.coordinator.sender.send(Box::new(Message::CodegenComplete::<B>)));
|
||||
}
|
||||
|
||||
pub(crate) fn submit_autodiff_items(&self, items: Vec<AutoDiffItem>) {
|
||||
drop(self.coordinator.sender.send(Box::new(Message::<B>::AddAutoDiffItems(items))));
|
||||
}
|
||||
|
||||
pub(crate) fn check_for_errors(&self, sess: &Session) {
|
||||
self.shared_emitter_main.check(sess, false);
|
||||
}
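A simplified model of the new coordinator message (plain `std::sync::mpsc` here; the real code uses the codegen coordinator's channel and generic backend types): the main thread submits the collected autodiff items via `submit_autodiff_items`, the coordinator appends them, and they are later handed to `generate_lto_work`.

    use std::sync::mpsc::channel;

    enum Message {
        AddAutoDiffItems(Vec<String>),
        CodegenComplete,
    }

    fn main() {
        let (tx, rx) = channel();
        tx.send(Message::AddAutoDiffItems(vec!["square".to_string()])).unwrap();
        tx.send(Message::CodegenComplete).unwrap();

        // Coordinator side: collect items until codegen is done.
        let mut autodiff_items = Vec::new();
        for msg in rx.try_iter() {
            match msg {
                Message::AddAutoDiffItems(mut items) => autodiff_items.append(&mut items),
                Message::CodegenComplete => break,
            }
        }
        assert_eq!(autodiff_items.len(), 1);
    }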
|
||||
|
|
|
@ -18,7 +18,7 @@ use rustc_middle::middle::debugger_visualizer::{DebuggerVisualizerFile, Debugger
|
|||
use rustc_middle::middle::exported_symbols::SymbolExportKind;
|
||||
use rustc_middle::middle::{exported_symbols, lang_items};
|
||||
use rustc_middle::mir::BinOp;
|
||||
use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder, MonoItem};
|
||||
use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder, MonoItem, MonoItemPartitions};
|
||||
use rustc_middle::query::Providers;
|
||||
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
|
||||
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
|
||||
|
@ -623,7 +623,9 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
|
|||
|
||||
// Run the monomorphization collector and partition the collected items into
|
||||
// codegen units.
|
||||
let codegen_units = tcx.collect_and_partition_mono_items(()).codegen_units;
|
||||
let MonoItemPartitions { codegen_units, autodiff_items, .. } =
|
||||
tcx.collect_and_partition_mono_items(());
|
||||
let autodiff_fncs = autodiff_items.to_vec();
|
||||
|
||||
// Force all codegen_unit queries so they are already either red or green
|
||||
// when compile_codegen_unit accesses them. We are not able to re-execute
|
||||
|
@ -694,6 +696,10 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
|
|||
);
|
||||
}
|
||||
|
||||
if !autodiff_fncs.is_empty() {
|
||||
ongoing_codegen.submit_autodiff_items(autodiff_fncs);
|
||||
}
|
||||
|
||||
// For better throughput during parallel processing by LLVM, we used to sort
|
||||
// CGUs largest to smallest. This would lead to better thread utilization
|
||||
// by, for example, preventing a large CGU from being processed last and
|
||||
|
|
|
@ -1,5 +1,10 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use rustc_ast::attr::list_contains_name;
|
||||
use rustc_ast::{MetaItemInner, attr};
|
||||
use rustc_ast::expand::autodiff_attrs::{
|
||||
AutoDiffAttrs, DiffActivity, DiffMode, valid_input_activity, valid_ret_activity,
|
||||
};
|
||||
use rustc_ast::{MetaItem, MetaItemInner, attr};
|
||||
use rustc_attr_parsing::{InlineAttr, InstructionSetAttr, OptimizeAttr};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_errors::codes::*;
|
||||
|
@ -13,6 +18,7 @@ use rustc_middle::middle::codegen_fn_attrs::{
|
|||
};
|
||||
use rustc_middle::mir::mono::Linkage;
|
||||
use rustc_middle::query::Providers;
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::{self as ty, TyCtxt};
|
||||
use rustc_session::parse::feature_err;
|
||||
use rustc_session::{Session, lint};
|
||||
|
@ -65,6 +71,13 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
|
|||
codegen_fn_attrs.flags |= CodegenFnAttrFlags::TRACK_CALLER;
|
||||
}
|
||||
|
||||
// If our rustc version supports autodiff/enzyme, then we call our handler
|
||||
// to check for any `#[rustc_autodiff(...)]` attributes.
|
||||
if cfg!(llvm_enzyme) {
|
||||
let ad = autodiff_attrs(tcx, did.into());
|
||||
codegen_fn_attrs.autodiff_item = ad;
|
||||
}
|
||||
|
||||
// When `no_builtins` is applied at the crate level, we should add the
|
||||
// `no-builtins` attribute to each function to ensure it takes effect in LTO.
|
||||
let crate_attrs = tcx.hir().attrs(rustc_hir::CRATE_HIR_ID);
|
||||
|
@ -856,6 +869,109 @@ impl<'a> MixedExportNameAndNoMangleState<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
/// We now check the #\[rustc_autodiff\] attributes which we generated from the #[autodiff(...)]
|
||||
/// macros. There are two forms. The pure one without args to mark primal functions (the functions
|
||||
/// being differentiated). The other form is #[rustc_autodiff(Mode, ActivityList)] on top of the
|
||||
/// placeholder functions. We wrote the rustc_autodiff attributes ourself, so this should never
|
||||
/// panic, unless we introduced a bug when parsing the autodiff macro.
|
||||
fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> {
|
||||
let attrs = tcx.get_attrs(id, sym::rustc_autodiff);
|
||||
|
||||
let attrs =
|
||||
attrs.filter(|attr| attr.name_or_empty() == sym::rustc_autodiff).collect::<Vec<_>>();
|
||||
|
||||
// check for exactly one autodiff attribute on placeholder functions.
|
||||
// There should only be one, since we generate a new placeholder per ad macro.
|
||||
// FIXME(ZuseZ4): re-enable this check. Currently we add multiple, which doesn't cause harm but
|
||||
// looks strange e.g. under cargo-expand.
|
||||
let attr = match &attrs[..] {
|
||||
[] => return None,
|
||||
[attr] => attr,
|
||||
// These two attributes are the same and unfortunately duplicated due to a previous bug.
|
||||
[attr, _attr2] => attr,
|
||||
_ => {
|
||||
//FIXME(ZuseZ4): Once we fixed our parser, we should also prohibit the two-attribute
|
||||
//branch above.
|
||||
span_bug!(attrs[1].span, "cg_ssa: rustc_autodiff should only exist once per source");
|
||||
}
|
||||
};
|
||||
|
||||
let list = attr.meta_item_list().unwrap_or_default();
|
||||
|
||||
// empty autodiff attribute macros (i.e. `#[autodiff]`) are used to mark source functions
|
||||
if list.is_empty() {
|
||||
return Some(AutoDiffAttrs::source());
|
||||
}
|
||||
|
||||
let [mode, input_activities @ .., ret_activity] = &list[..] else {
|
||||
span_bug!(attr.span, "rustc_autodiff attribute must contain mode and activities");
|
||||
};
|
||||
let mode = if let MetaItemInner::MetaItem(MetaItem { path: ref p1, .. }) = mode {
|
||||
p1.segments.first().unwrap().ident
|
||||
} else {
|
||||
span_bug!(attr.span, "rustc_autodiff attribute must contain mode");
|
||||
};
|
||||
|
||||
// parse mode
|
||||
let mode = match mode.as_str() {
|
||||
"Forward" => DiffMode::Forward,
|
||||
"Reverse" => DiffMode::Reverse,
|
||||
"ForwardFirst" => DiffMode::ForwardFirst,
|
||||
"ReverseFirst" => DiffMode::ReverseFirst,
|
||||
_ => {
|
||||
span_bug!(mode.span, "rustc_autodiff attribute contains invalid mode");
|
||||
}
|
||||
};
|
||||
|
||||
// First read the ret symbol from the attribute
|
||||
let ret_symbol = if let MetaItemInner::MetaItem(MetaItem { path: ref p1, .. }) = ret_activity {
|
||||
p1.segments.first().unwrap().ident
|
||||
} else {
|
||||
span_bug!(attr.span, "rustc_autodiff attribute must contain the return activity");
|
||||
};
|
||||
|
||||
// Then parse it into an actual DiffActivity
|
||||
let Ok(ret_activity) = DiffActivity::from_str(ret_symbol.as_str()) else {
|
||||
span_bug!(ret_symbol.span, "invalid return activity");
|
||||
};
|
||||
|
||||
// Now parse all the intermediate (input) activities
|
||||
let mut arg_activities: Vec<DiffActivity> = vec![];
|
||||
for arg in input_activities {
|
||||
let arg_symbol = if let MetaItemInner::MetaItem(MetaItem { path: ref p2, .. }) = arg {
|
||||
match p2.segments.first() {
|
||||
Some(x) => x.ident,
|
||||
None => {
|
||||
span_bug!(
|
||||
arg.span(),
|
||||
"rustc_autodiff attribute must contain the input activity"
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
span_bug!(arg.span(), "rustc_autodiff attribute must contain the input activity");
|
||||
};
|
||||
|
||||
match DiffActivity::from_str(arg_symbol.as_str()) {
|
||||
Ok(arg_activity) => arg_activities.push(arg_activity),
|
||||
Err(_) => {
|
||||
span_bug!(arg_symbol.span, "invalid input activity");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for &input in &arg_activities {
|
||||
if !valid_input_activity(mode, input) {
|
||||
span_bug!(attr.span, "Invalid input activity {} for {} mode", input, mode);
|
||||
}
|
||||
}
|
||||
if !valid_ret_activity(mode, ret_activity) {
|
||||
span_bug!(attr.span, "Invalid return activity {} for {} mode", ret_activity, mode);
|
||||
}
|
||||
|
||||
Some(AutoDiffAttrs { mode, ret_activity, input_activity: arg_activities })
|
||||
}
|
||||
|
||||
pub(crate) fn provide(providers: &mut Providers) {
|
||||
*providers = Providers { codegen_fn_attrs, should_inherit_track_caller, ..*providers };
|
||||
}
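To make the parser above concrete, here is a standalone sketch (local enums, not the compiler's types) of the two steps it performs: the first path segment selects the mode, the last one the return activity, and everything in between becomes an input activity. The attribute itself is compiler-generated from the `#[autodiff(...)]` macro; a shape like `#[rustc_autodiff(Reverse, Duplicated, Active)]` is used below purely as an illustration, not taken from the diff.

    use std::str::FromStr;

    #[derive(Debug, PartialEq)]
    enum Mode { Forward, Reverse, ForwardFirst, ReverseFirst }

    impl FromStr for Mode {
        type Err = ();
        fn from_str(s: &str) -> Result<Self, ()> {
            match s {
                "Forward" => Ok(Mode::Forward),
                "Reverse" => Ok(Mode::Reverse),
                "ForwardFirst" => Ok(Mode::ForwardFirst),
                "ReverseFirst" => Ok(Mode::ReverseFirst),
                _ => Err(()),
            }
        }
    }

    fn main() {
        // Same slice pattern as `[mode, input_activities @ .., ret_activity]` above.
        let segments = ["Reverse", "Duplicated", "Active"];
        let [mode, inputs @ .., ret] = &segments[..] else { unreachable!() };
        assert_eq!(mode.parse::<Mode>(), Ok(Mode::Reverse));
        println!("inputs = {inputs:?}, ret = {ret}");
    }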
|
||||
|
|
|
@ -39,6 +39,10 @@ pub(crate) struct CguNotRecorded<'a> {
|
|||
pub cgu_name: &'a str,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_ssa_autodiff_without_lto)]
|
||||
pub struct AutodiffWithoutLto;
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_ssa_unknown_reuse_kind)]
|
||||
pub(crate) struct UnknownReuseKind {
|
||||
|
|
|
@ -424,12 +424,12 @@ fn compare_method_predicate_entailment<'tcx>(
|
|||
Ok(())
|
||||
}
|
||||
|
||||
struct RemapLateParam<'a, 'tcx> {
|
||||
struct RemapLateParam<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
mapping: &'a FxIndexMap<ty::LateParamRegionKind, ty::LateParamRegionKind>,
|
||||
mapping: FxIndexMap<ty::LateParamRegionKind, ty::LateParamRegionKind>,
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RemapLateParam<'_, 'tcx> {
|
||||
impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RemapLateParam<'tcx> {
|
||||
fn cx(&self) -> TyCtxt<'tcx> {
|
||||
self.tcx
|
||||
}
|
||||
|
@ -653,6 +653,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
|
|||
}))),
|
||||
terr,
|
||||
false,
|
||||
None,
|
||||
);
|
||||
return Err(diag.emit());
|
||||
}
|
||||
|
@ -1070,6 +1071,7 @@ fn report_trait_method_mismatch<'tcx>(
|
|||
}))),
|
||||
terr,
|
||||
false,
|
||||
None,
|
||||
);
|
||||
|
||||
diag.emit()
|
||||
|
@ -1862,6 +1864,7 @@ fn compare_const_predicate_entailment<'tcx>(
|
|||
}))),
|
||||
terr,
|
||||
false,
|
||||
None,
|
||||
);
|
||||
return Err(diag.emit());
|
||||
};
|
||||
|
|
|
@ -299,8 +299,7 @@ fn report_mismatched_rpitit_signature<'tcx>(
|
|||
})
|
||||
.collect();
|
||||
|
||||
let mut return_ty =
|
||||
trait_m_sig.output().fold_with(&mut super::RemapLateParam { tcx, mapping: &mapping });
|
||||
let mut return_ty = trait_m_sig.output().fold_with(&mut super::RemapLateParam { tcx, mapping });
|
||||
|
||||
if tcx.asyncness(impl_m_def_id).is_async() && tcx.asyncness(trait_m_def_id).is_async() {
|
||||
let ty::Alias(ty::Projection, future_ty) = return_ty.kind() else {
|
||||
|
|
|
@ -1,7 +1,3 @@
|
|||
// FIXME(@lcnr): Move this module out of `rustc_hir_analysis`.
|
||||
//
|
||||
// We don't do any drop checking during hir typeck.
|
||||
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::codes::*;
|
||||
use rustc_errors::{ErrorGuaranteed, struct_span_code_err};
|
||||
|
@ -32,7 +28,10 @@ use crate::hir::def_id::{DefId, LocalDefId};
|
|||
/// struct/enum definition for the nominal type itself (i.e.
|
||||
/// cannot do `struct S<T>; impl<T:Clone> Drop for S<T> { ... }`).
|
||||
///
|
||||
pub fn check_drop_impl(tcx: TyCtxt<'_>, drop_impl_did: DefId) -> Result<(), ErrorGuaranteed> {
|
||||
pub(crate) fn check_drop_impl(
|
||||
tcx: TyCtxt<'_>,
|
||||
drop_impl_did: DefId,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
match tcx.impl_polarity(drop_impl_did) {
|
||||
ty::ImplPolarity::Positive => {}
|
||||
ty::ImplPolarity::Negative => {
|
||||
|
|
|
@@ -199,7 +199,8 @@ pub fn check_intrinsic_type(
let split: Vec<&str> = name_str.split('_').collect();
assert!(split.len() >= 2, "Atomic intrinsic in an incorrect format");

//We only care about the operation here
// Each atomic op has variants with different suffixes (`_seq_cst`, `_acquire`, etc.). Use
// string ops to strip the suffixes, because the variants all get the same treatment here.
let (n_tps, inputs, output) = match split[1] {
"cxchg" | "cxchgweak" => (
1,
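A self-contained illustration of the suffix-stripping comment above (the intrinsic names here are assumptions for the example, not an exhaustive list): splitting on `_` and dispatching on the second segment treats every memory-ordering variant of an op the same way.

    fn atomic_op(name: &str) -> Option<&str> {
        let mut parts = name.split('_');
        if parts.next()? != "atomic" {
            return None;
        }
        // The op itself; any ordering suffix after it is irrelevant here.
        parts.next()
    }

    fn main() {
        assert_eq!(atomic_op("atomic_cxchg_seqcst_seqcst"), Some("cxchg"));
        assert_eq!(atomic_op("atomic_load_acquire"), Some("load"));
        assert_eq!(atomic_op("not_atomic"), None);
    }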
|
||||
|
|
|
@ -455,18 +455,14 @@ fn fn_sig_suggestion<'tcx>(
|
|||
let mut output = sig.output();
|
||||
|
||||
let asyncness = if tcx.asyncness(assoc.def_id).is_async() {
|
||||
output = if let ty::Alias(_, alias_ty) = *output.kind() {
|
||||
tcx.explicit_item_self_bounds(alias_ty.def_id)
|
||||
output = if let ty::Alias(_, alias_ty) = *output.kind()
|
||||
&& let Some(output) = tcx
|
||||
.explicit_item_self_bounds(alias_ty.def_id)
|
||||
.iter_instantiated_copied(tcx, alias_ty.args)
|
||||
.find_map(|(bound, _)| {
|
||||
bound.as_projection_clause()?.no_bound_vars()?.term.as_type()
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
span_bug!(
|
||||
ident.span,
|
||||
"expected async fn to have `impl Future` output, but it returns {output}"
|
||||
)
|
||||
})
|
||||
}) {
|
||||
output
|
||||
} else {
|
||||
span_bug!(
|
||||
ident.span,
|
||||
|
@ -649,6 +645,7 @@ pub fn check_function_signature<'tcx>(
|
|||
}))),
|
||||
err,
|
||||
false,
|
||||
None,
|
||||
);
|
||||
return Err(diag.emit());
|
||||
}
|
||||
|
|
|
@@ -2267,14 +2267,12 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> {

fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalModDefId) -> Result<(), ErrorGuaranteed> {
let items = tcx.hir_module_items(module);
let mut res = items.par_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id));
res =
res.and(items.par_impl_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id)));
res =
res.and(items.par_trait_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id)));
res = res
.and(items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id)));
res = res.and(items.par_opaques(|item| tcx.ensure().check_well_formed(item)));
let res = items
.par_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id))
.and(items.par_impl_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id)))
.and(items.par_trait_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id)))
.and(items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id.def_id)))
.and(items.par_opaques(|item| tcx.ensure().check_well_formed(item)));
if module == LocalModDefId::CRATE_DEF_ID {
super::entry::check_for_entry_fn(tcx);
}
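Why the chained form above is equivalent to the sequential `res = res.and(...)` version: `Result::and` takes an already-evaluated argument, so every `par_*` check still runs; only the combined result changes hands. A small demonstration:

    fn check(label: &str, ok: bool) -> Result<(), String> {
        println!("running {label}");
        if ok { Ok(()) } else { Err(format!("{label} failed")) }
    }

    fn main() {
        // All three checks print "running ...", even though the first one fails.
        let res = check("items", false)
            .and(check("impl items", true))
            .and(check("trait items", true));
        assert_eq!(res, Err("items failed".to_string()));
    }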
|
||||
|
|
|
@ -404,17 +404,12 @@ pub(crate) fn coerce_unsized_info<'tcx>(
|
|||
check_mutbl(mt_a, mt_b, &|ty| Ty::new_imm_ref(tcx, r_b, ty))
|
||||
}
|
||||
|
||||
(&ty::Ref(_, ty_a, mutbl_a), &ty::RawPtr(ty_b, mutbl_b)) => check_mutbl(
|
||||
ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a },
|
||||
ty::TypeAndMut { ty: ty_b, mutbl: mutbl_b },
|
||||
&|ty| Ty::new_imm_ptr(tcx, ty),
|
||||
),
|
||||
|
||||
(&ty::RawPtr(ty_a, mutbl_a), &ty::RawPtr(ty_b, mutbl_b)) => check_mutbl(
|
||||
ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a },
|
||||
ty::TypeAndMut { ty: ty_b, mutbl: mutbl_b },
|
||||
&|ty| Ty::new_imm_ptr(tcx, ty),
|
||||
),
|
||||
(&ty::Ref(_, ty_a, mutbl_a), &ty::RawPtr(ty_b, mutbl_b))
|
||||
| (&ty::RawPtr(ty_a, mutbl_a), &ty::RawPtr(ty_b, mutbl_b)) => {
|
||||
let mt_a = ty::TypeAndMut { ty: ty_a, mutbl: mutbl_a };
|
||||
let mt_b = ty::TypeAndMut { ty: ty_b, mutbl: mutbl_b };
|
||||
check_mutbl(mt_a, mt_b, &|ty| Ty::new_imm_ptr(tcx, ty))
|
||||
}
|
||||
|
||||
(&ty::Adt(def_a, args_a), &ty::Adt(def_b, args_b))
|
||||
if def_a.is_struct() && def_b.is_struct() =>
|
||||
|
|
|
@ -158,12 +158,12 @@ fn coherent_trait(tcx: TyCtxt<'_>, def_id: DefId) -> Result<(), ErrorGuaranteed>
|
|||
let trait_ref = trait_header.trait_ref.instantiate_identity();
|
||||
let trait_def = tcx.trait_def(trait_ref.def_id);
|
||||
|
||||
res = res.and(check_impl(tcx, impl_def_id, trait_ref, trait_def));
|
||||
res = res.and(check_object_overlap(tcx, impl_def_id, trait_ref));
|
||||
|
||||
res = res.and(unsafety::check_item(tcx, impl_def_id, trait_header, trait_def));
|
||||
res = res.and(tcx.ensure().orphan_check_impl(impl_def_id));
|
||||
res = res.and(builtin::check_trait(tcx, def_id, impl_def_id, trait_header));
|
||||
res = res
|
||||
.and(check_impl(tcx, impl_def_id, trait_ref, trait_def))
|
||||
.and(check_object_overlap(tcx, impl_def_id, trait_ref))
|
||||
.and(unsafety::check_item(tcx, impl_def_id, trait_header, trait_def))
|
||||
.and(tcx.ensure().orphan_check_impl(impl_def_id))
|
||||
.and(builtin::check_trait(tcx, def_id, impl_def_id, trait_header));
|
||||
}
|
||||
|
||||
res
|
||||
|
|
|
@ -57,7 +57,7 @@ mod type_of;
|
|||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
pub fn provide(providers: &mut Providers) {
|
||||
pub(crate) fn provide(providers: &mut Providers) {
|
||||
resolve_bound_vars::provide(providers);
|
||||
*providers = Providers {
|
||||
type_of: type_of::type_of,
|
||||
|
@ -122,7 +122,7 @@ pub fn provide(providers: &mut Providers) {
|
|||
/// `ItemCtxt` is parameterized by a `DefId` that it uses to satisfy
|
||||
/// `probe_ty_param_bounds` requests, drawing the information from
|
||||
/// the HIR (`hir::Generics`), recursively.
|
||||
pub struct ItemCtxt<'tcx> {
|
||||
pub(crate) struct ItemCtxt<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
item_def_id: LocalDefId,
|
||||
tainted_by_errors: Cell<Option<ErrorGuaranteed>>,
|
||||
|
@ -148,7 +148,7 @@ impl<'v> Visitor<'v> for HirPlaceholderCollector {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct CollectItemTypesVisitor<'tcx> {
|
||||
pub(crate) struct CollectItemTypesVisitor<'tcx> {
|
||||
pub tcx: TyCtxt<'tcx>,
|
||||
}
|
||||
|
||||
|
@ -364,19 +364,19 @@ fn bad_placeholder<'cx, 'tcx>(
|
|||
}
|
||||
|
||||
impl<'tcx> ItemCtxt<'tcx> {
|
||||
pub fn new(tcx: TyCtxt<'tcx>, item_def_id: LocalDefId) -> ItemCtxt<'tcx> {
|
||||
pub(crate) fn new(tcx: TyCtxt<'tcx>, item_def_id: LocalDefId) -> ItemCtxt<'tcx> {
|
||||
ItemCtxt { tcx, item_def_id, tainted_by_errors: Cell::new(None) }
|
||||
}
|
||||
|
||||
pub fn lower_ty(&self, hir_ty: &hir::Ty<'tcx>) -> Ty<'tcx> {
|
||||
pub(crate) fn lower_ty(&self, hir_ty: &hir::Ty<'tcx>) -> Ty<'tcx> {
|
||||
self.lowerer().lower_ty(hir_ty)
|
||||
}
|
||||
|
||||
pub fn hir_id(&self) -> hir::HirId {
|
||||
pub(crate) fn hir_id(&self) -> hir::HirId {
|
||||
self.tcx.local_def_id_to_hir_id(self.item_def_id)
|
||||
}
|
||||
|
||||
pub fn node(&self) -> hir::Node<'tcx> {
|
||||
pub(crate) fn node(&self) -> hir::Node<'tcx> {
|
||||
self.tcx.hir_node(self.hir_id())
|
||||
}
|
||||
|
||||
|
|
|
@ -100,36 +100,19 @@ enum InheritanceKind {
|
|||
Own,
|
||||
}
|
||||
|
||||
struct GenericsBuilder<'tcx> {
|
||||
fn build_generics<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
sig_id: DefId,
|
||||
parent: Option<DefId>,
|
||||
inh_kind: InheritanceKind,
|
||||
}
|
||||
|
||||
impl<'tcx> GenericsBuilder<'tcx> {
|
||||
fn new(tcx: TyCtxt<'tcx>, sig_id: DefId) -> GenericsBuilder<'tcx> {
|
||||
GenericsBuilder { tcx, sig_id, parent: None, inh_kind: InheritanceKind::WithParent(false) }
|
||||
}
|
||||
|
||||
fn with_parent(mut self, parent: DefId) -> Self {
|
||||
self.parent = Some(parent);
|
||||
self
|
||||
}
|
||||
|
||||
fn with_inheritance_kind(mut self, inh_kind: InheritanceKind) -> Self {
|
||||
self.inh_kind = inh_kind;
|
||||
self
|
||||
}
|
||||
|
||||
fn build(self) -> ty::Generics {
|
||||
) -> ty::Generics {
|
||||
let mut own_params = vec![];
|
||||
|
||||
let sig_generics = self.tcx.generics_of(self.sig_id);
|
||||
if let InheritanceKind::WithParent(has_self) = self.inh_kind
|
||||
let sig_generics = tcx.generics_of(sig_id);
|
||||
if let InheritanceKind::WithParent(has_self) = inh_kind
|
||||
&& let Some(parent_def_id) = sig_generics.parent
|
||||
{
|
||||
let sig_parent_generics = self.tcx.generics_of(parent_def_id);
|
||||
let sig_parent_generics = tcx.generics_of(parent_def_id);
|
||||
own_params.append(&mut sig_parent_generics.own_params.clone());
|
||||
if !has_self {
|
||||
own_params.remove(0);
|
||||
|
@ -155,9 +138,9 @@ impl<'tcx> GenericsBuilder<'tcx> {
|
|||
let param_def_id_to_index =
|
||||
own_params.iter().map(|param| (param.def_id, param.index)).collect();
|
||||
|
||||
let (parent_count, has_self) = if let Some(def_id) = self.parent {
|
||||
let parent_generics = self.tcx.generics_of(def_id);
|
||||
let parent_kind = self.tcx.def_kind(def_id);
|
||||
let (parent_count, has_self) = if let Some(def_id) = parent {
|
||||
let parent_generics = tcx.generics_of(def_id);
|
||||
let parent_kind = tcx.def_kind(def_id);
|
||||
(parent_generics.count(), parent_kind == DefKind::Trait)
|
||||
} else {
|
||||
(0, false)
|
||||
|
@ -180,7 +163,7 @@ impl<'tcx> GenericsBuilder<'tcx> {
|
|||
}
|
||||
|
||||
ty::Generics {
|
||||
parent: self.parent,
|
||||
parent,
|
||||
parent_count,
|
||||
own_params,
|
||||
param_def_id_to_index,
|
||||
|
@ -188,42 +171,14 @@ impl<'tcx> GenericsBuilder<'tcx> {
|
|||
has_late_bound_regions: sig_generics.has_late_bound_regions,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct PredicatesBuilder<'tcx> {
|
||||
fn build_predicates<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
sig_id: DefId,
|
||||
parent: Option<DefId>,
|
||||
inh_kind: InheritanceKind,
|
||||
args: ty::GenericArgsRef<'tcx>,
|
||||
}
|
||||
|
||||
impl<'tcx> PredicatesBuilder<'tcx> {
|
||||
fn new(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
args: ty::GenericArgsRef<'tcx>,
|
||||
sig_id: DefId,
|
||||
) -> PredicatesBuilder<'tcx> {
|
||||
PredicatesBuilder {
|
||||
tcx,
|
||||
sig_id,
|
||||
parent: None,
|
||||
inh_kind: InheritanceKind::WithParent(false),
|
||||
args,
|
||||
}
|
||||
}
|
||||
|
||||
fn with_parent(mut self, parent: DefId) -> Self {
|
||||
self.parent = Some(parent);
|
||||
self
|
||||
}
|
||||
|
||||
fn with_inheritance_kind(mut self, inh_kind: InheritanceKind) -> Self {
|
||||
self.inh_kind = inh_kind;
|
||||
self
|
||||
}
|
||||
|
||||
fn build(self) -> ty::GenericPredicates<'tcx> {
|
||||
) -> ty::GenericPredicates<'tcx> {
|
||||
struct PredicatesCollector<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
preds: Vec<(ty::Clause<'tcx>, Span)>,
|
||||
|
@ -257,65 +212,51 @@ impl<'tcx> PredicatesBuilder<'tcx> {
|
|||
self.with_own_preds(f, def_id)
|
||||
}
|
||||
}
|
||||
let collector = PredicatesCollector::new(self.tcx, self.args);
|
||||
let collector = PredicatesCollector::new(tcx, args);
|
||||
|
||||
// `explicit_predicates_of` is used here to avoid copying `Self: Trait` predicate.
|
||||
// Note: `predicates_of` query can also add inferred outlives predicates, but that
|
||||
// is not the case here as `sig_id` is either a trait or a function.
|
||||
let preds = match self.inh_kind {
|
||||
let preds = match inh_kind {
|
||||
InheritanceKind::WithParent(false) => {
|
||||
collector.with_preds(|def_id| self.tcx.explicit_predicates_of(def_id), self.sig_id)
|
||||
collector.with_preds(|def_id| tcx.explicit_predicates_of(def_id), sig_id)
|
||||
}
|
||||
InheritanceKind::WithParent(true) => {
|
||||
collector.with_preds(|def_id| self.tcx.predicates_of(def_id), self.sig_id)
|
||||
collector.with_preds(|def_id| tcx.predicates_of(def_id), sig_id)
|
||||
}
|
||||
InheritanceKind::Own => {
|
||||
collector.with_own_preds(|def_id| self.tcx.predicates_of(def_id), self.sig_id)
|
||||
collector.with_own_preds(|def_id| tcx.predicates_of(def_id), sig_id)
|
||||
}
|
||||
}
|
||||
.preds;
|
||||
|
||||
ty::GenericPredicates {
|
||||
parent: self.parent,
|
||||
predicates: self.tcx.arena.alloc_from_iter(preds),
|
||||
}
|
||||
}
|
||||
ty::GenericPredicates { parent, predicates: tcx.arena.alloc_from_iter(preds) }
|
||||
}
|
||||
|
||||
struct GenericArgsBuilder<'tcx> {
|
||||
fn build_generic_args<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
remap_table: RemapTable,
|
||||
sig_id: DefId,
|
||||
def_id: LocalDefId,
|
||||
}
|
||||
|
||||
impl<'tcx> GenericArgsBuilder<'tcx> {
|
||||
fn new(tcx: TyCtxt<'tcx>, sig_id: DefId, def_id: LocalDefId) -> GenericArgsBuilder<'tcx> {
|
||||
GenericArgsBuilder { tcx, remap_table: FxHashMap::default(), sig_id, def_id }
|
||||
}
|
||||
|
||||
fn build_from_args(mut self, args: ty::GenericArgsRef<'tcx>) -> ty::GenericArgsRef<'tcx> {
|
||||
let caller_generics = self.tcx.generics_of(self.def_id);
|
||||
let callee_generics = self.tcx.generics_of(self.sig_id);
|
||||
args: ty::GenericArgsRef<'tcx>,
|
||||
) -> ty::GenericArgsRef<'tcx> {
|
||||
let caller_generics = tcx.generics_of(def_id);
|
||||
let callee_generics = tcx.generics_of(sig_id);
|
||||
|
||||
let mut remap_table = FxHashMap::default();
|
||||
for caller_param in &caller_generics.own_params {
|
||||
let callee_index =
|
||||
callee_generics.param_def_id_to_index(self.tcx, caller_param.def_id).unwrap();
|
||||
self.remap_table.insert(callee_index, caller_param.index);
|
||||
let callee_index = callee_generics.param_def_id_to_index(tcx, caller_param.def_id).unwrap();
|
||||
remap_table.insert(callee_index, caller_param.index);
|
||||
}
|
||||
|
||||
let mut folder = ParamIndexRemapper { tcx: self.tcx, remap_table: self.remap_table };
|
||||
let mut folder = ParamIndexRemapper { tcx, remap_table };
|
||||
args.fold_with(&mut folder)
|
||||
}
|
||||
}
|
||||
|
||||
fn create_generic_args<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
def_id: LocalDefId,
|
||||
sig_id: DefId,
|
||||
) -> ty::GenericArgsRef<'tcx> {
|
||||
let builder = GenericArgsBuilder::new(tcx, sig_id, def_id);
|
||||
|
||||
let caller_kind = fn_kind(tcx, def_id.into());
|
||||
let callee_kind = fn_kind(tcx, sig_id);
|
||||
match (caller_kind, callee_kind) {
|
||||
|
@ -325,7 +266,7 @@ fn create_generic_args<'tcx>(
|
|||
| (FnKind::AssocTrait, FnKind::Free)
|
||||
| (FnKind::AssocTrait, FnKind::AssocTrait) => {
|
||||
let args = ty::GenericArgs::identity_for_item(tcx, sig_id);
|
||||
builder.build_from_args(args)
|
||||
build_generic_args(tcx, sig_id, def_id, args)
|
||||
}
|
||||
|
||||
(FnKind::AssocTraitImpl, FnKind::AssocTrait) => {
|
||||
|
@ -335,8 +276,9 @@ fn create_generic_args<'tcx>(
|
|||
tcx.impl_trait_header(parent).unwrap().trait_ref.instantiate_identity().args;
|
||||
|
||||
let trait_args = ty::GenericArgs::identity_for_item(tcx, sig_id);
|
||||
let method_args = tcx.mk_args_from_iter(trait_args.iter().skip(callee_generics.parent_count));
|
||||
let method_args = builder.build_from_args(method_args);
|
||||
let method_args =
|
||||
tcx.mk_args_from_iter(trait_args.iter().skip(callee_generics.parent_count));
|
||||
let method_args = build_generic_args(tcx, sig_id, def_id, method_args);
|
||||
|
||||
tcx.mk_args_from_iter(parent_args.iter().chain(method_args))
|
||||
}
|
||||
|
@ -347,16 +289,16 @@ fn create_generic_args<'tcx>(
|
|||
let generic_self_ty = ty::GenericArg::from(self_ty);
|
||||
|
||||
let trait_args = ty::GenericArgs::identity_for_item(tcx, sig_id);
|
||||
let trait_args = builder.build_from_args(trait_args);
|
||||
let trait_args = build_generic_args(tcx, sig_id, def_id, trait_args);
|
||||
|
||||
let args = std::iter::once(generic_self_ty).chain(trait_args.iter().skip(1));
|
||||
tcx.mk_args_from_iter(args)
|
||||
}
|
||||
|
||||
// For trait impl's `sig_id` is always equal to the corresponding trait method.
|
||||
// For inherent methods delegation is not yet supported.
|
||||
(FnKind::AssocTraitImpl, _)
|
||||
| (_, FnKind::AssocTraitImpl)
|
||||
// Delegation to inherent methods is not yet supported.
|
||||
| (_, FnKind::AssocInherentImpl) => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
@ -377,39 +319,31 @@ pub(crate) fn inherit_generics_for_delegation_item<'tcx>(
|
|||
def_id: LocalDefId,
|
||||
sig_id: DefId,
|
||||
) -> ty::Generics {
|
||||
let builder = GenericsBuilder::new(tcx, sig_id);
|
||||
|
||||
let caller_kind = fn_kind(tcx, def_id.into());
|
||||
let callee_kind = fn_kind(tcx, sig_id);
|
||||
match (caller_kind, callee_kind) {
|
||||
(FnKind::Free, FnKind::Free)
|
||||
| (FnKind::Free, FnKind::AssocTrait) => builder.with_inheritance_kind(InheritanceKind::WithParent(true)).build(),
|
||||
(FnKind::Free, FnKind::Free) | (FnKind::Free, FnKind::AssocTrait) => {
|
||||
build_generics(tcx, sig_id, None, InheritanceKind::WithParent(true))
|
||||
}
|
||||
|
||||
(FnKind::AssocTraitImpl, FnKind::AssocTrait) => {
|
||||
builder
|
||||
.with_parent(tcx.parent(def_id.into()))
|
||||
.with_inheritance_kind(InheritanceKind::Own)
|
||||
.build()
|
||||
build_generics(tcx, sig_id, Some(tcx.parent(def_id.into())), InheritanceKind::Own)
|
||||
}
|
||||
|
||||
(FnKind::AssocInherentImpl, FnKind::AssocTrait)
|
||||
| (FnKind::AssocTrait, FnKind::AssocTrait) => {
|
||||
builder
|
||||
.with_parent(tcx.parent(def_id.into()))
|
||||
.build()
|
||||
}
|
||||
|
||||
(FnKind::AssocInherentImpl, FnKind::Free)
|
||||
| (FnKind::AssocTrait, FnKind::Free) => {
|
||||
builder
|
||||
.with_parent(tcx.parent(def_id.into()))
|
||||
.build()
|
||||
}
|
||||
| (FnKind::AssocTrait, FnKind::AssocTrait)
|
||||
| (FnKind::AssocInherentImpl, FnKind::Free)
|
||||
| (FnKind::AssocTrait, FnKind::Free) => build_generics(
|
||||
tcx,
|
||||
sig_id,
|
||||
Some(tcx.parent(def_id.into())),
|
||||
InheritanceKind::WithParent(false),
|
||||
),
|
||||
|
||||
// For trait impl's `sig_id` is always equal to the corresponding trait method.
|
||||
// For inherent methods delegation is not yet supported.
|
||||
(FnKind::AssocTraitImpl, _)
|
||||
| (_, FnKind::AssocTraitImpl)
|
||||
// Delegation to inherent methods is not yet supported.
|
||||
| (_, FnKind::AssocInherentImpl) => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
@ -420,36 +354,36 @@ pub(crate) fn inherit_predicates_for_delegation_item<'tcx>(
|
|||
sig_id: DefId,
|
||||
) -> ty::GenericPredicates<'tcx> {
|
||||
let args = create_generic_args(tcx, def_id, sig_id);
|
||||
let builder = PredicatesBuilder::new(tcx, args, sig_id);
|
||||
|
||||
let caller_kind = fn_kind(tcx, def_id.into());
|
||||
let callee_kind = fn_kind(tcx, sig_id);
|
||||
match (caller_kind, callee_kind) {
|
||||
(FnKind::Free, FnKind::Free)
|
||||
| (FnKind::Free, FnKind::AssocTrait) => {
|
||||
builder.with_inheritance_kind(InheritanceKind::WithParent(true)).build()
|
||||
(FnKind::Free, FnKind::Free) | (FnKind::Free, FnKind::AssocTrait) => {
|
||||
build_predicates(tcx, sig_id, None, InheritanceKind::WithParent(true), args)
|
||||
}
|
||||
|
||||
(FnKind::AssocTraitImpl, FnKind::AssocTrait) => {
|
||||
builder
|
||||
.with_parent(tcx.parent(def_id.into()))
|
||||
.with_inheritance_kind(InheritanceKind::Own)
|
||||
.build()
|
||||
}
|
||||
(FnKind::AssocTraitImpl, FnKind::AssocTrait) => build_predicates(
|
||||
tcx,
|
||||
sig_id,
|
||||
Some(tcx.parent(def_id.into())),
|
||||
InheritanceKind::Own,
|
||||
args,
|
||||
),
|
||||
|
||||
(FnKind::AssocInherentImpl, FnKind::AssocTrait)
|
||||
| (FnKind::AssocTrait, FnKind::AssocTrait)
|
||||
| (FnKind::AssocInherentImpl, FnKind::Free)
|
||||
| (FnKind::AssocTrait, FnKind::Free) => {
|
||||
builder
|
||||
.with_parent(tcx.parent(def_id.into()))
|
||||
.build()
|
||||
}
|
||||
| (FnKind::AssocTrait, FnKind::Free) => build_predicates(
|
||||
tcx,
|
||||
sig_id,
|
||||
Some(tcx.parent(def_id.into())),
|
||||
InheritanceKind::WithParent(false),
|
||||
args,
|
||||
),
|
||||
|
||||
// For trait impl's `sig_id` is always equal to the corresponding trait method.
|
||||
// For inherent methods delegation is not yet supported.
|
||||
(FnKind::AssocTraitImpl, _)
|
||||
| (_, FnKind::AssocTraitImpl)
|
||||
// Delegation to inherent methods is not yet supported.
|
||||
| (_, FnKind::AssocInherentImpl) => unreachable!(),
|
||||
}
|
||||
}
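A generic before/after sketch of the refactor applied throughout this file (illustrative types, not the compiler's): a single-use builder whose setters were always called in one expression is replaced by a plain function taking the former builder fields as parameters, mirroring `build_generics(tcx, sig_id, parent, inh_kind)` and `build_predicates(...)` above.

    // Before (shape only): Builder::new(tcx, sig_id).with_parent(p).with_inheritance_kind(k).build()
    // After: one function with explicit arguments.
    #[derive(Clone, Copy, Debug)]
    enum InheritanceKind { WithParent(bool), Own }

    fn build_thing(sig_id: u32, parent: Option<u32>, inh_kind: InheritanceKind) -> String {
        format!("sig={sig_id}, parent={parent:?}, inheritance={inh_kind:?}")
    }

    fn main() {
        // Call sites now spell out all inputs instead of chaining setters.
        println!("{}", build_thing(7, Some(3), InheritanceKind::WithParent(false)));
        println!("{}", build_thing(7, Some(3), InheritanceKind::Own));
    }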
|
||||
|
|
|
@ -495,6 +495,7 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
|
|||
.iter()
|
||||
.any(|constraint| constraint.ident.name == item.name)
|
||||
})
|
||||
.filter(|item| !item.is_impl_trait_in_trait())
|
||||
.map(|item| self.tcx.item_ident(item.def_id).to_string())
|
||||
.collect()
|
||||
} else {
|
||||
|
|
|
@ -273,7 +273,7 @@ pub fn lower_generic_args<'tcx: 'a, 'a>(
|
|||
|
||||
// We lower to an infer even when the feature gate is not enabled
|
||||
// as it is useful for diagnostics to be able to see a `ConstKind::Infer`
|
||||
args.push(ctx.provided_kind(&args, param, arg));
|
||||
args.push(ctx.provided_kind(param, arg));
|
||||
args_iter.next();
|
||||
params.next();
|
||||
}
|
||||
|
|
|
@ -296,7 +296,6 @@ pub trait GenericArgsLowerer<'a, 'tcx> {
|
|||
|
||||
fn provided_kind(
|
||||
&mut self,
|
||||
preceding_args: &[ty::GenericArg<'tcx>],
|
||||
param: &ty::GenericParamDef,
|
||||
arg: &GenericArg<'tcx>,
|
||||
) -> ty::GenericArg<'tcx>;
|
||||
|
@ -480,7 +479,6 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
|
||||
fn provided_kind(
|
||||
&mut self,
|
||||
_preceding_args: &[ty::GenericArg<'tcx>],
|
||||
param: &ty::GenericParamDef,
|
||||
arg: &GenericArg<'tcx>,
|
||||
) -> ty::GenericArg<'tcx> {
|
||||
|
|
|
@ -34,7 +34,7 @@
|
|||
//! impl<T, I: Iterator<Item=T>> SpecExtend<T> for I { /* default impl */ }
|
||||
//! ```
|
||||
//!
|
||||
//! We get that the generic pamameters for `impl2` are `[T, std::vec::IntoIter<T>]`.
|
||||
//! We get that the generic parameters for `impl2` are `[T, std::vec::IntoIter<T>]`.
|
||||
//! `T` is constrained to be `<I as Iterator>::Item`, so we check only
|
||||
//! `std::vec::IntoIter<T>` for repeated parameters, which it doesn't have. The
|
||||
//! predicates of `impl1` are only `T: Sized`, which is also a predicate of
|
||||
|
@ -119,7 +119,6 @@ fn check_always_applicable(
|
|||
impl2_node: Node,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
let span = tcx.def_span(impl1_def_id);
|
||||
let mut res = check_has_items(tcx, impl1_def_id, impl2_node, span);
|
||||
|
||||
let (impl1_args, impl2_args) = get_impl_args(tcx, impl1_def_id, impl2_node)?;
|
||||
let impl2_def_id = impl2_node.def_id();
|
||||
|
@ -131,11 +130,10 @@ fn check_always_applicable(
|
|||
unconstrained_parent_impl_args(tcx, impl2_def_id, impl2_args)
|
||||
};
|
||||
|
||||
res = res.and(check_static_lifetimes(tcx, &parent_args, span));
|
||||
res = res.and(check_duplicate_params(tcx, impl1_args, parent_args, span));
|
||||
res = res.and(check_predicates(tcx, impl1_def_id, impl1_args, impl2_node, impl2_args, span));
|
||||
|
||||
res
|
||||
check_has_items(tcx, impl1_def_id, impl2_node, span)
|
||||
.and(check_static_lifetimes(tcx, &parent_args, span))
|
||||
.and(check_duplicate_params(tcx, impl1_args, parent_args, span))
|
||||
.and(check_predicates(tcx, impl1_def_id, impl1_args, impl2_node, impl2_args, span))
|
||||
}
|
||||
|
||||
fn check_has_items(
|
||||
|
|
|
@ -83,12 +83,11 @@ pub mod autoderef;
|
|||
mod bounds;
|
||||
mod check_unused;
|
||||
mod coherence;
|
||||
mod delegation;
|
||||
pub mod hir_ty_lowering;
|
||||
// FIXME: This module shouldn't be public.
|
||||
pub mod collect;
|
||||
mod collect;
|
||||
mod constrained_generic_params;
|
||||
mod delegation;
|
||||
mod errors;
|
||||
pub mod hir_ty_lowering;
|
||||
pub mod hir_wf_check;
|
||||
mod impl_wf_check;
|
||||
mod outlives;
|
||||
|
@ -104,7 +103,8 @@ use rustc_middle::ty::{self, Const, Ty, TyCtxt};
|
|||
use rustc_span::Span;
|
||||
use rustc_trait_selection::traits;
|
||||
|
||||
use self::hir_ty_lowering::{FeedConstTy, HirTyLowerer};
|
||||
pub use crate::collect::suggest_impl_trait;
|
||||
use crate::hir_ty_lowering::{FeedConstTy, HirTyLowerer};
|
||||
|
||||
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
|
||||
|
||||
|
|
|
@@ -24,7 +24,7 @@ pub(super) fn infer_predicates(

// If new predicates were added then we need to re-calculate
// all crates since there could be new implied predicates.
'outer: loop {
loop {
let mut predicates_added = false;

// Visit all the crates and infer predicates
@@ -90,7 +90,7 @@ pub(super) fn infer_predicates(
}

if !predicates_added {
break 'outer;
break;
}
}
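The `'outer` label removed above was redundant: the `break` sits directly in the only enclosing loop, so an unlabeled `break` leaves the same loop. Minimal illustration:

    fn main() {
        let mut passes = 0;
        loop {
            passes += 1;
            let predicates_added = passes < 3; // stand-in for the real fixed-point check
            if !predicates_added {
                break; // no label needed: this is the innermost (and only) loop
            }
        }
        assert_eq!(passes, 3);
    }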
|
||||
|
||||
|
|
|
@ -27,9 +27,6 @@ mod solve;
|
|||
|
||||
pub(crate) mod dump;
|
||||
|
||||
/// Code for transforming variances.
|
||||
mod xform;
|
||||
|
||||
pub(crate) fn provide(providers: &mut Providers) {
|
||||
*providers = Providers { variances_of, crate_variances, ..*providers };
|
||||
}
|
||||
|
|
|
@ -12,8 +12,26 @@ use tracing::debug;
|
|||
use super::constraints::*;
|
||||
use super::terms::VarianceTerm::*;
|
||||
use super::terms::*;
|
||||
use super::xform::*;
|
||||
|
||||
fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
|
||||
// Greatest lower bound of the variance lattice as defined in The Paper:
|
||||
//
|
||||
// *
|
||||
// - +
|
||||
// o
|
||||
match (v1, v2) {
|
||||
(ty::Invariant, _) | (_, ty::Invariant) => ty::Invariant,
|
||||
|
||||
(ty::Covariant, ty::Contravariant) => ty::Invariant,
|
||||
(ty::Contravariant, ty::Covariant) => ty::Invariant,
|
||||
|
||||
(ty::Covariant, ty::Covariant) => ty::Covariant,
|
||||
|
||||
(ty::Contravariant, ty::Contravariant) => ty::Contravariant,
|
||||
|
||||
(x, ty::Bivariant) | (ty::Bivariant, x) => x,
|
||||
}
|
||||
}
|
||||
struct SolveContext<'a, 'tcx> {
|
||||
terms_cx: TermsContext<'a, 'tcx>,
|
||||
constraints: Vec<Constraint<'a>>,
|
||||
|
|
|
@ -1,22 +0,0 @@
|
|||
use rustc_middle::ty;
|
||||
|
||||
pub(crate) fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
|
||||
// Greatest lower bound of the variance lattice as
|
||||
// defined in The Paper:
|
||||
//
|
||||
// *
|
||||
// - +
|
||||
// o
|
||||
match (v1, v2) {
|
||||
(ty::Invariant, _) | (_, ty::Invariant) => ty::Invariant,
|
||||
|
||||
(ty::Covariant, ty::Contravariant) => ty::Invariant,
|
||||
(ty::Contravariant, ty::Covariant) => ty::Invariant,
|
||||
|
||||
(ty::Covariant, ty::Covariant) => ty::Covariant,
|
||||
|
||||
(ty::Contravariant, ty::Contravariant) => ty::Contravariant,
|
||||
|
||||
(x, ty::Bivariant) | (ty::Bivariant, x) => x,
|
||||
}
|
||||
}
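The `glb` table (moved from the now-deleted `xform.rs` into `solve.rs` above) can be exercised on its own; this standalone copy uses a local enum rather than `rustc_middle::ty::Variance` purely so it runs outside the compiler.

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Variance { Covariant, Contravariant, Invariant, Bivariant }

    // Greatest lower bound of the variance lattice: `*` (Bivariant) on top,
    // `-` and `+` in the middle, `o` (Invariant) at the bottom.
    fn glb(v1: Variance, v2: Variance) -> Variance {
        use Variance::*;
        match (v1, v2) {
            (Invariant, _) | (_, Invariant) => Invariant,
            (Covariant, Contravariant) | (Contravariant, Covariant) => Invariant,
            (Covariant, Covariant) => Covariant,
            (Contravariant, Contravariant) => Contravariant,
            (x, Bivariant) | (Bivariant, x) => x,
        }
    }

    fn main() {
        use Variance::*;
        assert_eq!(glb(Covariant, Contravariant), Invariant);
        assert_eq!(glb(Bivariant, Contravariant), Contravariant);
        assert_eq!(glb(Covariant, Covariant), Covariant);
    }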
|
|
@ -1261,7 +1261,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
|
||||
fn provided_kind(
|
||||
&mut self,
|
||||
_preceding_args: &[ty::GenericArg<'tcx>],
|
||||
param: &ty::GenericParamDef,
|
||||
arg: &GenericArg<'tcx>,
|
||||
) -> ty::GenericArg<'tcx> {
|
||||
|
|
|
@ -1147,6 +1147,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
Some(self.param_env.and(trace.values)),
|
||||
e,
|
||||
true,
|
||||
None,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -31,12 +31,12 @@ use crate::{CoroutineTypes, Diverges, EnclosingBreakables, TypeckRootCtxt};
|
|||
/// functions, closures, and `const`s, including performing type inference
|
||||
/// with [`InferCtxt`].
|
||||
///
|
||||
/// This is in contrast to [`ItemCtxt`], which is used to type-check item *signatures*
|
||||
/// and thus does not perform type inference.
|
||||
/// This is in contrast to `rustc_hir_analysis::collect::ItemCtxt`, which is
|
||||
/// used to type-check item *signatures* and thus does not perform type
|
||||
/// inference.
|
||||
///
|
||||
/// See [`ItemCtxt`]'s docs for more.
|
||||
/// See `ItemCtxt`'s docs for more.
|
||||
///
|
||||
/// [`ItemCtxt`]: rustc_hir_analysis::collect::ItemCtxt
|
||||
/// [`InferCtxt`]: infer::InferCtxt
|
||||
pub(crate) struct FnCtxt<'a, 'tcx> {
|
||||
pub(super) body_id: LocalDefId,
|
||||
|
|
|
@ -12,8 +12,8 @@ use rustc_hir::{
|
|||
GenericBound, HirId, Node, PatExpr, PatExprKind, Path, QPath, Stmt, StmtKind, TyKind,
|
||||
WherePredicateKind, expr_needs_parens,
|
||||
};
|
||||
use rustc_hir_analysis::collect::suggest_impl_trait;
|
||||
use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer;
|
||||
use rustc_hir_analysis::suggest_impl_trait;
|
||||
use rustc_middle::lint::in_external_macro;
|
||||
use rustc_middle::middle::stability::EvalResult;
|
||||
use rustc_middle::span_bug;
|
||||
|
|
|
@ -413,7 +413,6 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
|
|||
|
||||
fn provided_kind(
|
||||
&mut self,
|
||||
_preceding_args: &[ty::GenericArg<'tcx>],
|
||||
param: &ty::GenericParamDef,
|
||||
arg: &GenericArg<'tcx>,
|
||||
) -> ty::GenericArg<'tcx> {
|
||||
|
|
|
@ -8,12 +8,12 @@ use rustc_data_structures::profiling::TimePassesFormat;
|
|||
use rustc_errors::emitter::HumanReadableErrorType;
|
||||
use rustc_errors::{ColorConfig, registry};
|
||||
use rustc_session::config::{
|
||||
BranchProtection, CFGuard, Cfg, CollapseMacroDebuginfo, CoverageLevel, CoverageOptions,
|
||||
DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation, Externs,
|
||||
FmtDebug, FunctionReturn, InliningThreshold, Input, InstrumentCoverage, InstrumentXRay,
|
||||
LinkSelfContained, LinkerPluginLto, LocationDetail, LtoCli, MirIncludeSpans, NextSolverConfig,
|
||||
OomStrategy, Options, OutFileName, OutputType, OutputTypes, PAuthKey, PacRet, Passes,
|
||||
PatchableFunctionEntry, Polonius, ProcMacroExecutionStrategy, Strip, SwitchWithOptPath,
|
||||
AutoDiff, BranchProtection, CFGuard, Cfg, CollapseMacroDebuginfo, CoverageLevel,
|
||||
CoverageOptions, DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation,
|
||||
Externs, FmtDebug, FunctionReturn, InliningThreshold, Input, InstrumentCoverage,
|
||||
InstrumentXRay, LinkSelfContained, LinkerPluginLto, LocationDetail, LtoCli, MirIncludeSpans,
|
||||
NextSolverConfig, OomStrategy, Options, OutFileName, OutputType, OutputTypes, PAuthKey, PacRet,
|
||||
Passes, PatchableFunctionEntry, Polonius, ProcMacroExecutionStrategy, Strip, SwitchWithOptPath,
|
||||
SymbolManglingVersion, WasiExecModel, build_configuration, build_session_options,
|
||||
rustc_optgroups,
|
||||
};
|
||||
|
@ -760,6 +760,7 @@ fn test_unstable_options_tracking_hash() {
|
|||
tracked!(allow_features, Some(vec![String::from("lang_items")]));
|
||||
tracked!(always_encode_mir, true);
|
||||
tracked!(assume_incomplete_release, true);
|
||||
tracked!(autodiff, vec![AutoDiff::Print]);
|
||||
tracked!(binary_dep_depinfo, true);
|
||||
tracked!(box_noalias, false);
|
||||
tracked!(
|
||||
|
|
|
@ -965,7 +965,8 @@ extern "C" LLVMValueRef LLVMRustGetLastInstruction(LLVMBasicBlockRef BB) {
|
|||
return nullptr;
|
||||
}
|
||||
|
||||
extern "C" void LLVMRustEraseInstBefore(LLVMBasicBlockRef bb, LLVMValueRef I) {
|
||||
extern "C" void LLVMRustEraseInstUntilInclusive(LLVMBasicBlockRef bb,
|
||||
LLVMValueRef I) {
|
||||
auto &BB = *unwrap(bb);
|
||||
auto &Inst = *unwrap<Instruction>(I);
|
||||
auto It = BB.begin();
|
||||
|
@ -973,8 +974,6 @@ extern "C" void LLVMRustEraseInstBefore(LLVMBasicBlockRef bb, LLVMValueRef I) {
|
|||
++It;
|
||||
// Make sure we found the Instruction.
|
||||
assert(It != BB.end());
|
||||
// We don't want to erase the instruction itself.
|
||||
It--;
|
||||
// Delete in rev order to ensure no dangling references.
|
||||
while (It != BB.begin()) {
|
||||
auto Prev = std::prev(It);
|
||||
|
|
|
@ -32,6 +32,8 @@ middle_assert_shl_overflow =
|
|||
middle_assert_shr_overflow =
|
||||
attempt to shift right by `{$val}`, which would overflow
|
||||
|
||||
middle_autodiff_unsafe_inner_const_ref = reading from a `Duplicated` const {$ty} is unsafe
|
||||
|
||||
middle_bounds_check =
|
||||
index out of bounds: the length is {$len} but the index is {$index}
|
||||
|
||||
|
@ -107,6 +109,8 @@ middle_type_length_limit = reached the type-length limit while instantiating `{$
|
|||
middle_unknown_layout =
|
||||
the type `{$ty}` has an unknown layout
|
||||
|
||||
middle_unsupported_union = we don't support unions yet: '{$ty_name}'
|
||||
|
||||
middle_values_too_big =
|
||||
values of the type `{$ty}` are too big for the target architecture
|
||||
|
||||
|
|
|
@ -87,6 +87,7 @@ macro_rules! arena_types {
|
|||
[] codegen_unit: rustc_middle::mir::mono::CodegenUnit<'tcx>,
|
||||
[decode] attribute: rustc_hir::Attribute,
|
||||
[] name_set: rustc_data_structures::unord::UnordSet<rustc_span::Symbol>,
|
||||
[] autodiff_item: rustc_ast::expand::autodiff_attrs::AutoDiffItem,
|
||||
[] ordered_name_set: rustc_data_structures::fx::FxIndexSet<rustc_span::Symbol>,
|
||||
[] pats: rustc_middle::ty::PatternKind<'tcx>,
|
||||
|
||||
|
|
|
@ -37,6 +37,20 @@ pub struct OpaqueHiddenTypeMismatch<'tcx> {
|
|||
pub sub: TypeMismatchReason,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(middle_unsupported_union)]
|
||||
pub struct UnsupportedUnion {
|
||||
pub ty_name: String,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(middle_autodiff_unsafe_inner_const_ref)]
|
||||
pub struct AutodiffUnsafeInnerConstRef {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub ty: String,
|
||||
}
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
pub enum TypeMismatchReason {
|
||||
#[label(middle_conflict_types)]
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use rustc_abi::Align;
|
||||
use rustc_ast::expand::autodiff_attrs::AutoDiffAttrs;
|
||||
use rustc_attr_parsing::{InlineAttr, InstructionSetAttr, OptimizeAttr};
|
||||
use rustc_macros::{HashStable, TyDecodable, TyEncodable};
|
||||
use rustc_span::Symbol;
|
||||
|
@ -52,6 +53,8 @@ pub struct CodegenFnAttrs {
|
|||
/// The `#[patchable_function_entry(...)]` attribute. Indicates how many nops should be around
|
||||
/// the function entry.
|
||||
pub patchable_function_entry: Option<PatchableFunctionEntry>,
|
||||
/// For the `#[autodiff]` macros.
|
||||
pub autodiff_item: Option<AutoDiffAttrs>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)]
|
||||
|
@ -160,6 +163,7 @@ impl CodegenFnAttrs {
|
|||
instruction_set: None,
|
||||
alignment: None,
|
||||
patchable_function_entry: None,
|
||||
autodiff_item: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,6 +1,7 @@
use std::fmt;
use std::hash::Hash;

use rustc_ast::expand::autodiff_attrs::AutoDiffItem;
use rustc_attr_parsing::InlineAttr;
use rustc_data_structures::base_n::{BaseNString, CASE_INSENSITIVE, ToBaseN};
use rustc_data_structures::fingerprint::Fingerprint;
@@ -246,6 +247,7 @@ impl ToStableHashKey<StableHashingContext<'_>> for MonoItem<'_> {
pub struct MonoItemPartitions<'tcx> {
pub codegen_units: &'tcx [CodegenUnit<'tcx>],
pub all_mono_items: &'tcx DefIdSet,
pub autodiff_items: &'tcx [AutoDiffItem],
}

#[derive(Debug, HashStable)]
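Not compiler code: a shape-only mirror of the new `MonoItemPartitions` field and the `..` destructuring used at the call sites updated earlier in this diff (`let MonoItemPartitions { codegen_units, autodiff_items, .. } = ...`), showing why sites that ignore the new field only needed a `..`.

    #[allow(dead_code)]
    struct Partitions<'a> {
        codegen_units: &'a [&'a str],
        all_mono_items: &'a [&'a str],
        autodiff_items: &'a [&'a str],
    }

    fn main() {
        let p = Partitions {
            codegen_units: &["cgu.0", "cgu.1"],
            all_mono_items: &["f", "g"],
            autodiff_items: &[],
        };
        // A caller that only needs some fields ignores the rest with `..`,
        // so adding `autodiff_items` did not break this pattern.
        let Partitions { codegen_units, autodiff_items, .. } = p;
        assert_eq!(codegen_units.len(), 2);
        assert!(autodiff_items.is_empty());
    }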
|
||||
|
|
|
@ -6,6 +6,7 @@ edition = "2021"
|
|||
[dependencies]
|
||||
# tidy-alphabetical-start
|
||||
rustc_abi = { path = "../rustc_abi" }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_attr_parsing = { path = "../rustc_attr_parsing" }
|
||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||
rustc_errors = { path = "../rustc_errors" }
|
||||
|
@ -15,6 +16,7 @@ rustc_macros = { path = "../rustc_macros" }
|
|||
rustc_middle = { path = "../rustc_middle" }
|
||||
rustc_session = { path = "../rustc_session" }
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
rustc_symbol_mangling = { path = "../rustc_symbol_mangling" }
|
||||
rustc_target = { path = "../rustc_target" }
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
|
|
|
@ -257,7 +257,7 @@ struct SharedState<'tcx> {
|
|||
|
||||
pub(crate) struct UsageMap<'tcx> {
|
||||
// Maps every mono item to the mono items used by it.
|
||||
used_map: UnordMap<MonoItem<'tcx>, Vec<MonoItem<'tcx>>>,
|
||||
pub used_map: UnordMap<MonoItem<'tcx>, Vec<MonoItem<'tcx>>>,
|
||||
|
||||
// Maps every mono item to the mono items that use it.
|
||||
user_map: UnordMap<MonoItem<'tcx>, Vec<MonoItem<'tcx>>>,
|
||||
|
|
|
@ -92,6 +92,8 @@
|
|||
//! source-level module, functions from the same module will be available for
|
||||
//! inlining, even when they are not marked `#[inline]`.
|
||||
|
||||
mod autodiff;
|
||||
|
||||
use std::cmp;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::fs::{self, File};
|
||||
|
@ -251,7 +253,17 @@ where
|
|||
can_export_generics,
|
||||
always_export_generics,
|
||||
);
|
||||
if visibility == Visibility::Hidden && can_be_internalized {
|
||||
|
||||
// We can't differentiate something that got inlined.
|
||||
let autodiff_active = cfg!(llvm_enzyme)
|
||||
&& cx
|
||||
.tcx
|
||||
.codegen_fn_attrs(mono_item.def_id())
|
||||
.autodiff_item
|
||||
.as_ref()
|
||||
.is_some_and(|ad| ad.is_active());
|
||||
|
||||
if !autodiff_active && visibility == Visibility::Hidden && can_be_internalized {
|
||||
internalization_candidates.insert(mono_item);
|
||||
}
|
||||
let size_estimate = mono_item.size_estimate(cx.tcx);
|
||||
|
@ -1176,6 +1188,18 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio
|
|||
})
|
||||
.collect();
|
||||
|
||||
let autodiff_mono_items: Vec<_> = items
|
||||
.iter()
|
||||
.filter_map(|item| match *item {
|
||||
MonoItem::Fn(ref instance) => Some((item, instance)),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let autodiff_items =
|
||||
autodiff::find_autodiff_source_functions(tcx, &usage_map, autodiff_mono_items);
|
||||
let autodiff_items = tcx.arena.alloc_from_iter(autodiff_items);
|
||||
|
||||
// Output monomorphization stats per def_id
|
||||
if let SwitchWithOptPath::Enabled(ref path) = tcx.sess.opts.unstable_opts.dump_mono_stats {
|
||||
if let Err(err) =
|
||||
|
@ -1236,7 +1260,11 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio
|
|||
}
|
||||
}
|
||||
|
||||
MonoItemPartitions { all_mono_items: tcx.arena.alloc(mono_items), codegen_units }
|
||||
MonoItemPartitions {
|
||||
all_mono_items: tcx.arena.alloc(mono_items),
|
||||
codegen_units,
|
||||
autodiff_items,
|
||||
}
|
||||
}
|
||||
|
||||
/// Outputs stats about instantiation counts and estimated size, per `MonoItem`'s
|
||||
|
|
121
compiler/rustc_monomorphize/src/partitioning/autodiff.rs
Normal file
|
@ -0,0 +1,121 @@
|
|||
use rustc_ast::expand::autodiff_attrs::{AutoDiffItem, DiffActivity};
|
||||
use rustc_hir::def_id::LOCAL_CRATE;
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::mir::mono::MonoItem;
|
||||
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
|
||||
use rustc_symbol_mangling::symbol_name_for_instance_in_crate;
|
||||
use tracing::{debug, trace};
|
||||
|
||||
use crate::partitioning::UsageMap;
|
||||
|
||||
fn adjust_activity_to_abi<'tcx>(tcx: TyCtxt<'tcx>, fn_ty: Ty<'tcx>, da: &mut Vec<DiffActivity>) {
|
||||
if !matches!(fn_ty.kind(), ty::FnDef(..)) {
|
||||
bug!("expected fn def for autodiff, got {:?}", fn_ty);
|
||||
}
|
||||
let fnc_binder: ty::Binder<'_, ty::FnSig<'_>> = fn_ty.fn_sig(tcx);
|
||||
|
||||
// If rustc compiles the unmodified primal, we know that this copy of the function
|
||||
// also has correct lifetimes. We know that Enzyme won't free the shadow too early
|
||||
// (or actually at all), so let's strip lifetimes when computing the layout.
|
||||
let x = tcx.instantiate_bound_regions_with_erased(fnc_binder);
|
||||
let mut new_activities = vec![];
|
||||
let mut new_positions = vec![];
|
||||
for (i, ty) in x.inputs().iter().enumerate() {
|
||||
if let Some(inner_ty) = ty.builtin_deref(true) {
|
||||
if ty.is_fn_ptr() {
|
||||
// FIXME(ZuseZ4): add a nicer error, or just figure out how to support them,
|
||||
// since Enzyme itself can handle them.
|
||||
tcx.dcx().err("function pointers are currently not supported in autodiff");
|
||||
}
|
||||
if inner_ty.is_slice() {
|
||||
// We know that the length will be passed as an extra argument.
|
||||
if !da.is_empty() {
|
||||
// We are looking at a slice. The length of that slice will become an
|
||||
// extra integer at the LLVM level. Integers are always const.
|
||||
// However, if the slice gets duplicated, we want to later check the
|
||||
// size. So we mark the new size argument as FakeActivitySize.
|
||||
let activity = match da[i] {
|
||||
DiffActivity::DualOnly
|
||||
| DiffActivity::Dual
|
||||
| DiffActivity::DuplicatedOnly
|
||||
| DiffActivity::Duplicated => DiffActivity::FakeActivitySize,
|
||||
DiffActivity::Const => DiffActivity::Const,
|
||||
_ => bug!("unexpected activity for ptr/ref"),
|
||||
};
|
||||
new_activities.push(activity);
|
||||
new_positions.push(i + 1);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Now add the extra activities coming from slices.
|
||||
// Process them in reverse order so that insertions don't invalidate the remaining (lower) positions.
|
||||
for _ in 0..new_activities.len() {
|
||||
let pos = new_positions.pop().unwrap();
|
||||
let activity = new_activities.pop().unwrap();
|
||||
da.insert(pos, activity);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn find_autodiff_source_functions<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
usage_map: &UsageMap<'tcx>,
|
||||
autodiff_mono_items: Vec<(&MonoItem<'tcx>, &Instance<'tcx>)>,
|
||||
) -> Vec<AutoDiffItem> {
|
||||
let mut autodiff_items: Vec<AutoDiffItem> = vec![];
|
||||
for (item, instance) in autodiff_mono_items {
|
||||
let target_id = instance.def_id();
|
||||
let cg_fn_attr = tcx.codegen_fn_attrs(target_id).autodiff_item.clone();
|
||||
let Some(target_attrs) = cg_fn_attr else {
|
||||
continue;
|
||||
};
|
||||
let mut input_activities: Vec<DiffActivity> = target_attrs.input_activity.clone();
|
||||
if target_attrs.is_source() {
|
||||
trace!("source found: {:?}", target_id);
|
||||
}
|
||||
if !target_attrs.apply_autodiff() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let target_symbol = symbol_name_for_instance_in_crate(tcx, instance.clone(), LOCAL_CRATE);
|
||||
|
||||
let source =
|
||||
usage_map.used_map.get(&item).unwrap().into_iter().find_map(|item| match *item {
|
||||
MonoItem::Fn(ref instance_s) => {
|
||||
let source_id = instance_s.def_id();
|
||||
if let Some(ad) = &tcx.codegen_fn_attrs(source_id).autodiff_item
|
||||
&& ad.is_active()
|
||||
{
|
||||
return Some(instance_s);
|
||||
}
|
||||
None
|
||||
}
|
||||
_ => None,
|
||||
});
|
||||
let inst = match source {
|
||||
Some(source) => source,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
debug!("source_id: {:?}", inst.def_id());
|
||||
let fn_ty = inst.ty(tcx, ty::TypingEnv::fully_monomorphized());
|
||||
assert!(fn_ty.is_fn());
|
||||
adjust_activity_to_abi(tcx, fn_ty, &mut input_activities);
|
||||
let symb = symbol_name_for_instance_in_crate(tcx, inst.clone(), LOCAL_CRATE);
|
||||
|
||||
let mut new_target_attrs = target_attrs.clone();
|
||||
new_target_attrs.input_activity = input_activities;
|
||||
let itm = new_target_attrs.into_item(symb, target_symbol);
|
||||
autodiff_items.push(itm);
|
||||
}
|
||||
|
||||
if !autodiff_items.is_empty() {
|
||||
trace!("AUTODIFF ITEMS EXIST");
|
||||
for item in &mut *autodiff_items {
|
||||
trace!("{}", &item);
|
||||
}
|
||||
}
|
||||
|
||||
autodiff_items
|
||||
}
|
|
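For readers following the index bookkeeping in `adjust_activity_to_abi` above, here is a small, self-contained sketch of the same reverse-order insertion technique; the enum and helper are illustrative stand-ins, not part of this patch. Positions recorded for slice arguments are applied from the back, so the insertions at higher indices happen first and cannot shift the lower positions recorded earlier.

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Activity {
    Duplicated,
    Const,
    FakeActivitySize,
}

// Apply recorded (position, activity) insertions in reverse order, mirroring the
// loop at the end of `adjust_activity_to_abi`: popping from the back means the
// highest positions are inserted first, so the lower positions stay valid.
fn insert_fake_sizes(
    da: &mut Vec<Activity>,
    mut new_positions: Vec<usize>,
    mut new_activities: Vec<Activity>,
) {
    while let (Some(pos), Some(activity)) = (new_positions.pop(), new_activities.pop()) {
        da.insert(pos, activity);
    }
}

fn main() {
    // Hypothetical `fn f(xs: &[f32], y: f32)` with activities [Duplicated, Const]:
    // the slice at index 0 gains a hidden length argument right after it.
    let mut da = vec![Activity::Duplicated, Activity::Const];
    insert_fake_sizes(&mut da, vec![1], vec![Activity::FakeActivitySize]);
    assert_eq!(
        da,
        vec![Activity::Duplicated, Activity::FakeActivitySize, Activity::Const]
    );
}
```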
@ -2471,6 +2471,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
|
|||
}))),
|
||||
terr,
|
||||
false,
|
||||
None,
|
||||
);
|
||||
diag.emit();
|
||||
self.abort.set(true);
|
||||
|
|
|
@ -189,6 +189,39 @@ pub enum CoverageLevel {
|
|||
Mcdc,
|
||||
}
|
||||
|
||||
/// The different settings that the `-Z autodiff` flag can have.
|
||||
#[derive(Clone, Copy, PartialEq, Hash, Debug)]
|
||||
pub enum AutoDiff {
|
||||
/// Print TypeAnalysis information
|
||||
PrintTA,
|
||||
/// Print ActivityAnalysis Information
|
||||
PrintAA,
|
||||
/// Print Performance Warnings from Enzyme
|
||||
PrintPerf,
|
||||
/// Combines the three print flags above.
|
||||
Print,
|
||||
/// Print the whole module before running opts.
|
||||
PrintModBefore,
|
||||
/// Print the whole module just before we pass it to Enzyme.
|
||||
/// For debugging purposes, prefer the OPT flag below.
|
||||
PrintModAfterOpts,
|
||||
/// Print the module after Enzyme differentiated everything.
|
||||
PrintModAfterEnzyme,
|
||||
|
||||
/// Enzyme's loose type debug helper (can cause incorrect gradients)
|
||||
LooseTypes,
|
||||
|
||||
/// Do not optimize the module after Enzyme is done.
|
||||
NoModOptAfter,
|
||||
/// Tell Enzyme to run LLVM Opts on each function it generated. By default off,
|
||||
/// since we already optimize the whole module after Enzyme is done.
|
||||
EnableFncOpt,
|
||||
/// Do not unroll vectorized loops.
NoVecUnroll,
|
||||
/// Allow specifying activity at runtime.
RuntimeActivity,
|
||||
/// Run Enzyme-specific inlining.
|
||||
Inline,
|
||||
}
|
||||
|
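As a rough illustration of how these settings might be consumed inside a compiler crate, here is a hedged sketch; the helper function is hypothetical, and only the `AutoDiff` enum above (with `Print` documented as combining the three print flags) is taken from the patch:

```rust
use rustc_session::config::AutoDiff;

// Hypothetical helper: decide whether Enzyme's type-analysis output should be
// printed, given the parsed `-Zautodiff=` values. `Print` is documented above
// as combining the three print flags, so it is treated as implying `PrintTA`.
fn wants_type_analysis_output(flags: &[AutoDiff]) -> bool {
    flags.contains(&AutoDiff::PrintTA) || flags.contains(&AutoDiff::Print)
}
```

Callers would typically reach the parsed values through `tcx.sess.opts.unstable_opts.autodiff`, the `Vec<crate::config::AutoDiff>` option declared later in this diff.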
||||
/// Settings for `-Z instrument-xray` flag.
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)]
|
||||
pub struct InstrumentXRay {
|
||||
|
@ -2902,7 +2935,7 @@ pub(crate) mod dep_tracking {
|
|||
};
|
||||
|
||||
use super::{
|
||||
BranchProtection, CFGuard, CFProtection, CollapseMacroDebuginfo, CoverageOptions,
|
||||
AutoDiff, BranchProtection, CFGuard, CFProtection, CollapseMacroDebuginfo, CoverageOptions,
|
||||
CrateType, DebugInfo, DebugInfoCompression, ErrorOutputType, FmtDebug, FunctionReturn,
|
||||
InliningThreshold, InstrumentCoverage, InstrumentXRay, LinkerPluginLto, LocationDetail,
|
||||
LtoCli, MirStripDebugInfo, NextSolverConfig, OomStrategy, OptLevel, OutFileName,
|
||||
|
@ -2950,6 +2983,7 @@ pub(crate) mod dep_tracking {
|
|||
}
|
||||
|
||||
impl_dep_tracking_hash_via_hash!(
|
||||
AutoDiff,
|
||||
bool,
|
||||
usize,
|
||||
NonZero<usize>,
|
||||
|
|
|
@ -398,6 +398,7 @@ mod desc {
|
|||
pub(crate) const parse_list: &str = "a space-separated list of strings";
|
||||
pub(crate) const parse_list_with_polarity: &str =
|
||||
"a comma-separated list of strings, with elements beginning with + or -";
|
||||
pub(crate) const parse_autodiff: &str = "a comma-separated list of settings: `Print`, `PrintTA`, `PrintAA`, `PrintPerf`, `PrintModBefore`, `PrintModAfterOpts`, `PrintModAfterEnzyme`, `LooseTypes`, `NoModOptAfter`, `EnableFncOpt`, `NoVecUnroll`, `RuntimeActivity`, `Inline`";
|
||||
pub(crate) const parse_comma_list: &str = "a comma-separated list of strings";
|
||||
pub(crate) const parse_opt_comma_list: &str = parse_comma_list;
|
||||
pub(crate) const parse_number: &str = "a number";
|
||||
|
@ -1029,6 +1030,38 @@ pub mod parse {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn parse_autodiff(slot: &mut Vec<AutoDiff>, v: Option<&str>) -> bool {
|
||||
let Some(v) = v else {
|
||||
*slot = vec![];
|
||||
return true;
|
||||
};
|
||||
let mut v: Vec<&str> = v.split(",").collect();
|
||||
v.sort_unstable();
|
||||
for &val in v.iter() {
|
||||
let variant = match val {
|
||||
"PrintTA" => AutoDiff::PrintTA,
|
||||
"PrintAA" => AutoDiff::PrintAA,
|
||||
"PrintPerf" => AutoDiff::PrintPerf,
|
||||
"Print" => AutoDiff::Print,
|
||||
"PrintModBefore" => AutoDiff::PrintModBefore,
|
||||
"PrintModAfterOpts" => AutoDiff::PrintModAfterOpts,
|
||||
"PrintModAfterEnzyme" => AutoDiff::PrintModAfterEnzyme,
|
||||
"LooseTypes" => AutoDiff::LooseTypes,
|
||||
"NoModOptAfter" => AutoDiff::NoModOptAfter,
|
||||
"EnableFncOpt" => AutoDiff::EnableFncOpt,
|
||||
"NoVecUnroll" => AutoDiff::NoVecUnroll,
|
||||
"Inline" => AutoDiff::Inline,
|
||||
_ => {
|
||||
// FIXME(ZuseZ4): print an error saying which value is not recognized
|
||||
return false;
|
||||
}
|
||||
};
|
||||
slot.push(variant);
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub(crate) fn parse_instrument_coverage(
|
||||
slot: &mut InstrumentCoverage,
|
||||
v: Option<&str>,
|
||||
|
@ -1736,6 +1769,22 @@ options! {
|
|||
either `loaded` or `not-loaded`."),
|
||||
assume_incomplete_release: bool = (false, parse_bool, [TRACKED],
|
||||
"make cfg(version) treat the current version as incomplete (default: no)"),
|
||||
autodiff: Vec<crate::config::AutoDiff> = (Vec::new(), parse_autodiff, [TRACKED],
|
||||
"a list of optional autodiff flags to enable
|
||||
Optional extra settings:
|
||||
`=PrintTA`
|
||||
`=PrintAA`
|
||||
`=PrintPerf`
|
||||
`=Print`
|
||||
`=PrintModBefore`
|
||||
`=PrintModAfterOpts`
|
||||
`=PrintModAfterEnzyme`
|
||||
`=LooseTypes`
|
||||
`=NoModOptAfter`
|
||||
`=EnableFncOpt`
|
||||
`=NoVecUnroll`
`=RuntimeActivity`
|
||||
`=Inline`
|
||||
Multiple options can be combined with commas."),
|
||||
#[rustc_lint_opt_deny_field_access("use `Session::binary_dep_depinfo` instead of this field")]
|
||||
binary_dep_depinfo: bool = (false, parse_bool, [TRACKED],
|
||||
"include artifacts (sysroot, crate dependencies) used during compilation in dep-info \
|
||||
|
|
|
@ -502,7 +502,6 @@ symbols! {
|
|||
augmented_assignments,
|
||||
auto_traits,
|
||||
autodiff,
|
||||
autodiff_fallback,
|
||||
automatically_derived,
|
||||
avx,
|
||||
avx512_target_feature,
|
||||
|
@ -568,7 +567,6 @@ symbols! {
|
|||
cfg_accessible,
|
||||
cfg_attr,
|
||||
cfg_attr_multi,
|
||||
cfg_autodiff_fallback,
|
||||
cfg_boolean_literals,
|
||||
cfg_doctest,
|
||||
cfg_emscripten_wasm_eh,
|
||||
|
|
|
@ -1392,9 +1392,13 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
mut values: Option<ty::ParamEnvAnd<'tcx, ValuePairs<'tcx>>>,
|
||||
terr: TypeError<'tcx>,
|
||||
prefer_label: bool,
|
||||
override_span: Option<Span>,
|
||||
) {
|
||||
let span = cause.span;
|
||||
|
||||
// We use `override_span` when we want the error to point at a `Span` other than
|
||||
// `cause.span`. This is used in E0271, when a closure is passed in where the return type
|
||||
// isn't what was expected. We want to point at the closure's return type (or expression),
|
||||
// instead of the expression where the closure is passed as call argument.
|
||||
let span = override_span.unwrap_or(cause.span);
|
||||
// For some types of errors, expected-found does not make
|
||||
// sense, so just ignore the values we were given.
|
||||
if let TypeError::CyclicTy(_) = terr {
|
||||
|
@ -2057,6 +2061,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
Some(param_env.and(trace.values)),
|
||||
terr,
|
||||
false,
|
||||
None,
|
||||
);
|
||||
diag
|
||||
}
|
||||
|
|
|
@ -708,6 +708,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
None,
|
||||
TypeError::Sorts(ty::error::ExpectedFound::new(expected_ty, ct_ty)),
|
||||
false,
|
||||
None,
|
||||
);
|
||||
diag
|
||||
}
|
||||
|
@ -931,14 +932,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
let hir_id = self.tcx.local_def_id_to_hir_id(obligation.cause.body_id);
|
||||
let body_id = match self.tcx.hir_node(hir_id) {
|
||||
hir::Node::Item(hir::Item {
|
||||
kind: hir::ItemKind::Fn { body: body_id, .. }, ..
|
||||
}) => body_id,
|
||||
_ => return false,
|
||||
};
|
||||
let ControlFlow::Break(expr) = (FindMethodSubexprOfTry { search_span: span })
|
||||
.visit_body(self.tcx.hir().body(*body_id))
|
||||
let Some(body_id) = self.tcx.hir_node(hir_id).body_id() else { return false };
|
||||
let ControlFlow::Break(expr) =
|
||||
(FindMethodSubexprOfTry { search_span: span }).visit_body(self.tcx.hir().body(body_id))
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
@ -1385,9 +1381,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
_ => (None, error.err),
|
||||
};
|
||||
|
||||
let msg = values
|
||||
let (msg, span, closure_span) = values
|
||||
.and_then(|(predicate, normalized_term, expected_term)| {
|
||||
self.maybe_detailed_projection_msg(predicate, normalized_term, expected_term)
|
||||
self.maybe_detailed_projection_msg(
|
||||
obligation.cause.span,
|
||||
predicate,
|
||||
normalized_term,
|
||||
expected_term,
|
||||
)
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
let mut cx = FmtPrinter::new_with_limit(
|
||||
|
@ -1395,12 +1396,39 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
Namespace::TypeNS,
|
||||
rustc_session::Limit(10),
|
||||
);
|
||||
(
|
||||
with_forced_trimmed_paths!(format!("type mismatch resolving `{}`", {
|
||||
self.resolve_vars_if_possible(predicate).print(&mut cx).unwrap();
|
||||
cx.into_buffer()
|
||||
}))
|
||||
})),
|
||||
obligation.cause.span,
|
||||
None,
|
||||
)
|
||||
});
|
||||
let mut diag = struct_span_code_err!(self.dcx(), obligation.cause.span, E0271, "{msg}");
|
||||
let mut diag = struct_span_code_err!(self.dcx(), span, E0271, "{msg}");
|
||||
if let Some(span) = closure_span {
|
||||
// Mark the closure decl so that it is seen even if we are pointing at the return
|
||||
// type or expression.
|
||||
//
|
||||
// error[E0271]: expected `{closure@foo.rs:41:16}` to be a closure that returns
|
||||
// `Unit3`, but it returns `Unit4`
|
||||
// --> $DIR/foo.rs:43:17
|
||||
// |
|
||||
// LL | let v = Unit2.m(
|
||||
// | - required by a bound introduced by this call
|
||||
// ...
|
||||
// LL | f: |x| {
|
||||
// | --- /* this span */
|
||||
// LL | drop(x);
|
||||
// LL | Unit4
|
||||
// | ^^^^^ expected `Unit3`, found `Unit4`
|
||||
// |
|
||||
diag.span_label(span, "this closure");
|
||||
if !span.overlaps(obligation.cause.span) {
|
||||
// Point at the binding corresponding to the closure where it is used.
|
||||
diag.span_label(obligation.cause.span, "closure used here");
|
||||
}
|
||||
}
|
||||
|
||||
let secondary_span = (|| {
|
||||
let ty::PredicateKind::Clause(ty::ClauseKind::Projection(proj)) =
|
||||
|
@ -1471,6 +1499,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
}),
|
||||
err,
|
||||
false,
|
||||
Some(span),
|
||||
);
|
||||
self.note_obligation_cause(&mut diag, obligation);
|
||||
diag.emit()
|
||||
|
@ -1479,34 +1508,66 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
|
|||
|
||||
fn maybe_detailed_projection_msg(
|
||||
&self,
|
||||
mut span: Span,
|
||||
projection_term: ty::AliasTerm<'tcx>,
|
||||
normalized_ty: ty::Term<'tcx>,
|
||||
expected_ty: ty::Term<'tcx>,
|
||||
) -> Option<String> {
|
||||
) -> Option<(String, Span, Option<Span>)> {
|
||||
let trait_def_id = projection_term.trait_def_id(self.tcx);
|
||||
let self_ty = projection_term.self_ty();
|
||||
|
||||
with_forced_trimmed_paths! {
|
||||
if self.tcx.is_lang_item(projection_term.def_id, LangItem::FnOnceOutput) {
|
||||
let fn_kind = self_ty.prefix_string(self.tcx);
|
||||
let (span, closure_span) = if let ty::Closure(def_id, _) = self_ty.kind() {
|
||||
let def_span = self.tcx.def_span(def_id);
|
||||
if let Some(local_def_id) = def_id.as_local()
|
||||
&& let node = self.tcx.hir_node_by_def_id(local_def_id)
|
||||
&& let Some(fn_decl) = node.fn_decl()
|
||||
&& let Some(id) = node.body_id()
|
||||
{
|
||||
span = match fn_decl.output {
|
||||
hir::FnRetTy::Return(ty) => ty.span,
|
||||
hir::FnRetTy::DefaultReturn(_) => {
|
||||
let body = self.tcx.hir().body(id);
|
||||
match body.value.kind {
|
||||
hir::ExprKind::Block(
|
||||
hir::Block { expr: Some(expr), .. },
|
||||
_,
|
||||
) => expr.span,
|
||||
hir::ExprKind::Block(
|
||||
hir::Block {
|
||||
expr: None, stmts: [.., last], ..
|
||||
},
|
||||
_,
|
||||
) => last.span,
|
||||
_ => body.value.span,
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
(span, Some(def_span))
|
||||
} else {
|
||||
(span, None)
|
||||
};
|
||||
let item = match self_ty.kind() {
|
||||
ty::FnDef(def, _) => self.tcx.item_name(*def).to_string(),
|
||||
_ => self_ty.to_string(),
|
||||
};
|
||||
Some(format!(
|
||||
Some((format!(
|
||||
"expected `{item}` to be a {fn_kind} that returns `{expected_ty}`, but it \
|
||||
returns `{normalized_ty}`",
|
||||
))
|
||||
), span, closure_span))
|
||||
} else if self.tcx.is_lang_item(trait_def_id, LangItem::Future) {
|
||||
Some(format!(
|
||||
Some((format!(
|
||||
"expected `{self_ty}` to be a future that resolves to `{expected_ty}`, but it \
|
||||
resolves to `{normalized_ty}`"
|
||||
))
|
||||
), span, None))
|
||||
} else if Some(trait_def_id) == self.tcx.get_diagnostic_item(sym::Iterator) {
|
||||
Some(format!(
|
||||
Some((format!(
|
||||
"expected `{self_ty}` to be an iterator that yields `{expected_ty}`, but it \
|
||||
yields `{normalized_ty}`"
|
||||
))
|
||||
), span, None))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
|
|
@ -716,6 +716,12 @@ impl AtomicBool {
|
|||
/// AcqRel | AcqRel | Acquire
|
||||
/// SeqCst | SeqCst | SeqCst
|
||||
///
|
||||
/// `compare_and_swap` and `compare_exchange` also differ in their return type. You can use
|
||||
/// `compare_exchange(...).unwrap_or_else(|x| x)` to recover the behavior of `compare_and_swap`,
|
||||
/// but in most cases it is more idiomatic to check whether the return value is `Ok` or `Err`
|
||||
/// rather than to infer success vs failure based on the value that was read.
|
||||
///
|
||||
/// During migration, consider whether it makes sense to use `compare_exchange_weak` instead.
|
||||
/// `compare_exchange_weak` is allowed to fail spuriously even when the comparison succeeds,
|
||||
/// which allows the compiler to generate better assembly code when the compare and swap
|
||||
/// is used in a loop.
|
||||
|
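The migration note above is repeated for `AtomicBool`, `AtomicPtr` and the integer atomics; a minimal, self-contained sketch of the paths it describes (function names are illustrative):

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// Drop-in replacement that preserves `compare_and_swap`'s "previous value"
// return, using the SeqCst | SeqCst mapping from the table above.
fn set_true_returning_previous(flag: &AtomicBool) -> bool {
    flag.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
        .unwrap_or_else(|x| x)
}

// Usually more idiomatic: branch on whether the exchange succeeded.
fn try_set_true(flag: &AtomicBool) -> bool {
    flag.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
        .is_ok()
}

// In a retry loop (e.g. acquiring a simple test-and-set lock),
// `compare_exchange_weak` may compile to better code, because a spurious
// failure is simply retried anyway.
fn acquire(lock: &AtomicBool) {
    while lock
        .compare_exchange_weak(false, true, Ordering::Acquire, Ordering::Relaxed)
        .is_err()
    {
        std::hint::spin_loop();
    }
}
```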
@ -1651,6 +1657,12 @@ impl<T> AtomicPtr<T> {
|
|||
/// AcqRel | AcqRel | Acquire
|
||||
/// SeqCst | SeqCst | SeqCst
|
||||
///
|
||||
/// `compare_and_swap` and `compare_exchange` also differ in their return type. You can use
|
||||
/// `compare_exchange(...).unwrap_or_else(|x| x)` to recover the behavior of `compare_and_swap`,
|
||||
/// but in most cases it is more idiomatic to check whether the return value is `Ok` or `Err`
|
||||
/// rather than to infer success vs failure based on the value that was read.
|
||||
///
|
||||
/// During migration, consider whether it makes sense to use `compare_exchange_weak` instead.
|
||||
/// `compare_exchange_weak` is allowed to fail spuriously even when the comparison succeeds,
|
||||
/// which allows the compiler to generate better assembly code when the compare and swap
|
||||
/// is used in a loop.
|
||||
|
@ -2771,6 +2783,12 @@ macro_rules! atomic_int {
|
|||
/// AcqRel | AcqRel | Acquire
|
||||
/// SeqCst | SeqCst | SeqCst
|
||||
///
|
||||
/// `compare_and_swap` and `compare_exchange` also differ in their return type. You can use
|
||||
/// `compare_exchange(...).unwrap_or_else(|x| x)` to recover the behavior of `compare_and_swap`,
|
||||
/// but in most cases it is more idiomatic to check whether the return value is `Ok` or `Err`
|
||||
/// rather than to infer success vs failure based on the value that was read.
|
||||
///
|
||||
/// During migration, consider whether it makes sense to use `compare_exchange_weak` instead.
|
||||
/// `compare_exchange_weak` is allowed to fail spuriously even when the comparison succeeds,
|
||||
/// which allows the compiler to generate better assembly code when the compare and swap
|
||||
/// is used in a loop.
|
||||
|
|
|
@ -1049,9 +1049,12 @@ pub fn rustc_cargo(
|
|||
// <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Internal.20lint.20for.20raw.20.60print!.60.20and.20.60println!.60.3F>.
|
||||
cargo.rustflag("-Zon-broken-pipe=kill");
|
||||
|
||||
if builder.config.llvm_enzyme {
|
||||
cargo.rustflag("-l").rustflag("Enzyme-19");
|
||||
}
|
||||
// We temporarily disable linking here as part of some refactoring.
|
||||
// This way, people can manually use -Z llvm-plugins and -C passes=enzyme for now.
|
||||
// In a follow-up PR, we will re-enable linking here and load the pass for them.
|
||||
//if builder.config.llvm_enzyme {
|
||||
// cargo.rustflag("-l").rustflag("Enzyme-19");
|
||||
//}
|
||||
|
||||
// Building with protected visibility reduces the number of dynamic relocations needed, giving
|
||||
// us a faster startup time. However GNU ld < 2.40 will error if we try to link a shared object
|
||||
|
|
|
@ -43,7 +43,7 @@ environment.
|
|||
|
||||
## Formatting and linting Python code
|
||||
|
||||
The Rust repository contains quite a lof of Python code. We try to keep
|
||||
The Rust repository contains quite a lot of Python code. We try to keep
|
||||
it both linted and formatted by the [ruff][ruff] tool.
|
||||
|
||||
When modifying Python code, use this command to format it:
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
`rustc` supports doing profile-guided optimization (PGO).
|
||||
This chapter describes what PGO is, what it is good for, and how it can be used.
|
||||
|
||||
## What Is Profiled-Guided Optimization?
|
||||
## What Is Profile-Guided Optimization?
|
||||
|
||||
The basic concept of PGO is to collect data about the typical execution of
|
||||
a program (e.g. which branches it is likely to take) and then use this data
|
||||
|
|
|
@ -36,10 +36,6 @@ For a full history of changes in the Rust 2024 style edition, see the git
|
|||
history of the style guide. Notable changes in the Rust 2024 style edition
|
||||
include:
|
||||
|
||||
- [#114764](https://github.com/rust-lang/rust/pull/114764) As the last member
|
||||
of a delimited expression, delimited expressions are generally combinable,
|
||||
regardless of the number of members. Previously only applied with exactly
|
||||
one member (except for closures with explicit blocks).
|
||||
- Miscellaneous `rustfmt` bugfixes.
|
||||
- Use version-sort (sort `x8`, `x16`, `x32`, `x64`, `x128` in that order).
|
||||
- Change "ASCIIbetical" sort to Unicode-aware "non-lowercase before lowercase".
|
||||
|
|
|
@ -818,11 +818,11 @@ E.g., `&&Some(foo)` matches, `Foo(4, Bar)` does not.
|
|||
|
||||
## Combinable expressions
|
||||
|
||||
When the last argument in a function call is formatted across
|
||||
multiple-lines, format the outer call as if it were a single-line call,
|
||||
Where a function call has a single argument, and that argument is formatted
|
||||
across multiple-lines, format the outer call as if it were a single-line call,
|
||||
if the result fits. Apply the same combining behaviour to any similar
|
||||
expressions which have multi-line, block-indented lists of sub-expressions
|
||||
delimited by parentheses, brackets, or braces. E.g.,
|
||||
delimited by parentheses (e.g., macros or tuple struct literals). E.g.,
|
||||
|
||||
```rust
|
||||
foo(bar(
|
||||
|
@ -848,61 +848,20 @@ let arr = [combinable(
|
|||
an_expr,
|
||||
another_expr,
|
||||
)];
|
||||
|
||||
let x = Thing(an_expr, another_expr, match cond {
|
||||
A => 1,
|
||||
B => 2,
|
||||
});
|
||||
|
||||
let x = format!("Stuff: {}", [
|
||||
an_expr,
|
||||
another_expr,
|
||||
]);
|
||||
|
||||
let x = func(an_expr, another_expr, SomeStruct {
|
||||
field: this_is_long,
|
||||
another_field: 123,
|
||||
});
|
||||
```
|
||||
|
||||
Apply this behavior recursively.
|
||||
|
||||
If the last argument is a multi-line closure with an explicit block,
|
||||
only apply the combining behavior if there are no other closure arguments.
|
||||
For a function with multiple arguments, if the last argument is a multi-line
|
||||
closure with an explicit block, there are no other closure arguments, and all
|
||||
the arguments and the first line of the closure fit on the first line, use the
|
||||
same combining behavior:
|
||||
|
||||
```rust
|
||||
// Combinable
|
||||
foo(first_arg, x, |param| {
|
||||
action();
|
||||
foo(param)
|
||||
})
|
||||
// Not combinable, because the closure is not the last argument
|
||||
foo(
|
||||
first_arg,
|
||||
|param| {
|
||||
action();
|
||||
foo(param)
|
||||
},
|
||||
whatever,
|
||||
)
|
||||
// Not combinable, because the first line of the closure does not fit
|
||||
foo(
|
||||
first_arg,
|
||||
x,
|
||||
move |very_long_param_causing_line_to_overflow| -> Bar {
|
||||
action();
|
||||
foo(param)
|
||||
},
|
||||
)
|
||||
// Not combinable, because there is more than one closure argument
|
||||
foo(
|
||||
first_arg,
|
||||
|x| x.bar(),
|
||||
|param| {
|
||||
action();
|
||||
foo(param)
|
||||
},
|
||||
)
|
||||
```
|
||||
|
||||
## Ranges
|
||||
|
|
23
src/doc/unstable-book/src/compiler-flags/autodiff.md
Normal file
|
@ -0,0 +1,23 @@
|
|||
# `autodiff`
|
||||
|
||||
The tracking issue for this feature is: [#124509](https://github.com/rust-lang/rust/issues/124509).
|
||||
|
||||
------------------------
|
||||
|
||||
This feature adds support for automatic differentiation of Rust functions.
|
||||
Set the `-Zautodiff=<options>` compiler flag to adjust the behaviour of the autodiff feature.
|
||||
Multiple options can be separated with a comma. Valid options are:
|
||||
|
||||
`PrintTA` - print Type Analysis Information
|
||||
`PrintAA` - print Activity Analysis Information
|
||||
`PrintPerf` - print Performance Warnings from Enzyme
|
||||
`Print` - print all intermediate transformations (combines `PrintTA`, `PrintAA` and `PrintPerf`)
|
||||
`PrintModBefore` - print the whole module, before running opts
|
||||
`PrintModAfterOpts` - print the whole module just before we pass it to Enzyme
|
||||
`PrintModAfterEnzyme` - print the module after Enzyme differentiated everything
|
||||
`LooseTypes` - Enzyme's loose type debug helper (can cause incorrect gradients)
|
||||
`Inline` - run Enzyme-specific inlining
|
||||
`NoModOptAfter` - do not optimize the module after Enzyme is done
|
||||
`EnableFncOpt` - tell Enzyme to run LLVM Opts on each function it generated
|
||||
`NoVecUnroll` - do not unroll vectorized loops
|
||||
`RuntimeActivity` - allow specifying activity at runtime
|
|
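These flags only have an effect on crates that actually use the (equally unstable) autodiff feature. A hypothetical sketch of such a crate follows; the `#![feature(autodiff)]` gate, the `std::autodiff::autodiff` import and the attribute arguments are assumptions about the surface API and are not documented on this page, and building it additionally requires an Enzyme-enabled toolchain:

```rust
#![feature(autodiff)]
// The import path and attribute arguments below are assumptions about the
// unstable surface API; they are not taken from this page.
use std::autodiff::autodiff;

// Ask the compiler to generate `d_square`, a reverse-mode derivative of `square`.
#[autodiff(d_square, Reverse, Duplicated, Active)]
fn square(x: &f64) -> f64 {
    x * x
}

fn main() {
    println!("square(3) = {}", square(&3.0));
}
```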
@ -1 +1 @@
|
|||
Subproject commit 2fe5164a2423dd67ef25e2c4fb204fd06362494b
|
||||
Subproject commit 0e5fa4a3d475f4dece489c9e06b11164f83789f5
|
|
@ -817,7 +817,6 @@ mod test {
|
|||
options.inline_config = HashMap::from([("version".to_owned(), "Two".to_owned())]);
|
||||
let config = get_config(None, Some(options));
|
||||
assert_eq!(config.style_edition(), StyleEdition::Edition2024);
|
||||
assert_eq!(config.overflow_delimited_expr(), true);
|
||||
}
|
||||
|
||||
#[nightly_only_test]
|
||||
|
@ -827,7 +826,6 @@ mod test {
|
|||
let config_file = Some(Path::new("tests/config/style-edition/just-version"));
|
||||
let config = get_config(config_file, Some(options));
|
||||
assert_eq!(config.style_edition(), StyleEdition::Edition2024);
|
||||
assert_eq!(config.overflow_delimited_expr(), true);
|
||||
}
|
||||
|
||||
#[nightly_only_test]
|
||||
|
@ -872,7 +870,6 @@ mod test {
|
|||
]);
|
||||
let config = get_config(None, Some(options));
|
||||
assert_eq!(config.style_edition(), StyleEdition::Edition2024);
|
||||
assert_eq!(config.overflow_delimited_expr(), true);
|
||||
}
|
||||
|
||||
#[nightly_only_test]
|
||||
|
@ -938,7 +935,6 @@ mod test {
|
|||
options.style_edition = Some(StyleEdition::Edition2024);
|
||||
let config = get_config(None, Some(options));
|
||||
assert_eq!(config.style_edition(), StyleEdition::Edition2024);
|
||||
assert_eq!(config.overflow_delimited_expr(), true);
|
||||
}
|
||||
|
||||
#[nightly_only_test]
|
||||
|
@ -948,6 +944,8 @@ mod test {
|
|||
let config_file = Some(Path::new("tests/config/style-edition/overrides"));
|
||||
let config = get_config(config_file, Some(options));
|
||||
assert_eq!(config.style_edition(), StyleEdition::Edition2024);
|
||||
// FIXME: this test doesn't really exercise anything, since
|
||||
// `overflow_delimited_expr` is disabled by default in edition 2024.
|
||||
assert_eq!(config.overflow_delimited_expr(), false);
|
||||
}
|
||||
|
||||
|
@ -959,7 +957,8 @@ mod test {
|
|||
options.inline_config =
|
||||
HashMap::from([("overflow_delimited_expr".to_owned(), "false".to_owned())]);
|
||||
let config = get_config(config_file, Some(options));
|
||||
assert_eq!(config.style_edition(), StyleEdition::Edition2024);
|
||||
// FIXME: this test doesn't really exercise anything, since
|
||||
// `overflow_delimited_expr` is disabled by default in edition 2024.
|
||||
assert_eq!(config.overflow_delimited_expr(), false);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -848,7 +848,7 @@ binop_separator = "Front"
|
|||
remove_nested_parens = true
|
||||
combine_control_expr = true
|
||||
short_array_element_width_threshold = 10
|
||||
overflow_delimited_expr = true
|
||||
overflow_delimited_expr = false
|
||||
struct_field_align_threshold = 0
|
||||
enum_discrim_align_threshold = 0
|
||||
match_arm_blocks = true
|
||||
|
|
|
@ -627,7 +627,7 @@ config_option_with_style_edition_default!(
|
|||
RemoveNestedParens, bool, _ => true;
|
||||
CombineControlExpr, bool, _ => true;
|
||||
ShortArrayElementWidthThreshold, usize, _ => 10;
|
||||
OverflowDelimitedExpr, bool, Edition2024 => true, _ => false;
|
||||
OverflowDelimitedExpr, bool, _ => false;
|
||||
StructFieldAlignThreshold, usize, _ => 0;
|
||||
EnumDiscrimAlignThreshold, usize, _ => 0;
|
||||
MatchArmBlocks, bool, _ => true;
|
||||
|
|
|
@ -25,10 +25,13 @@ fn combine_blocklike() {
|
|||
y: value2,
|
||||
});
|
||||
|
||||
do_thing(x, Bar {
|
||||
do_thing(
|
||||
x,
|
||||
Bar {
|
||||
x: value,
|
||||
y: value2,
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
do_thing(
|
||||
x,
|
||||
|
@ -46,12 +49,15 @@ fn combine_blocklike() {
|
|||
value4_with_longer_name,
|
||||
]);
|
||||
|
||||
do_thing(x, &[
|
||||
do_thing(
|
||||
x,
|
||||
&[
|
||||
value_with_longer_name,
|
||||
value2_with_longer_name,
|
||||
value3_with_longer_name,
|
||||
value4_with_longer_name,
|
||||
]);
|
||||
],
|
||||
);
|
||||
|
||||
do_thing(
|
||||
x,
|
||||
|
@ -71,12 +77,15 @@ fn combine_blocklike() {
|
|||
value4_with_longer_name,
|
||||
]);
|
||||
|
||||
do_thing(x, vec![
|
||||
do_thing(
|
||||
x,
|
||||
vec![
|
||||
value_with_longer_name,
|
||||
value2_with_longer_name,
|
||||
value3_with_longer_name,
|
||||
value4_with_longer_name,
|
||||
]);
|
||||
],
|
||||
);
|
||||
|
||||
do_thing(
|
||||
x,
|
||||
|
@ -99,22 +108,28 @@ fn combine_blocklike() {
|
|||
}
|
||||
|
||||
fn combine_struct_sample() {
|
||||
let identity = verify(&ctx, VerifyLogin {
|
||||
let identity = verify(
|
||||
&ctx,
|
||||
VerifyLogin {
|
||||
type_: LoginType::Username,
|
||||
username: args.username.clone(),
|
||||
password: Some(args.password.clone()),
|
||||
domain: None,
|
||||
})?;
|
||||
},
|
||||
)?;
|
||||
}
|
||||
|
||||
fn combine_macro_sample() {
|
||||
rocket::ignite()
|
||||
.mount("/", routes![
|
||||
.mount(
|
||||
"/",
|
||||
routes![
|
||||
http::auth::login,
|
||||
http::auth::logout,
|
||||
http::cors::options,
|
||||
http::action::dance,
|
||||
http::action::sleep,
|
||||
])
|
||||
],
|
||||
)
|
||||
.launch();
|
||||
}
|
||||
|
|
|
@ -21,18 +21,14 @@ LL | true
|
|||
found type `bool`
|
||||
|
||||
error[E0271]: expected `{closure@dont-ice-for-type-mismatch-in-closure-in-async.rs:6:10}` to be a closure that returns `bool`, but it returns `Option<()>`
|
||||
--> $DIR/dont-ice-for-type-mismatch-in-closure-in-async.rs:6:10
|
||||
--> $DIR/dont-ice-for-type-mismatch-in-closure-in-async.rs:6:16
|
||||
|
|
||||
LL | call(|| -> Option<()> {
|
||||
| _____----_^
|
||||
| | |
|
||||
| | required by a bound introduced by this call
|
||||
LL | |
|
||||
LL | | if true {
|
||||
LL | | false
|
||||
... |
|
||||
LL | | })
|
||||
| |_____^ expected `bool`, found `Option<()>`
|
||||
| ---- ------^^^^^^^^^^
|
||||
| | | |
|
||||
| | | expected `bool`, found `Option<()>`
|
||||
| | this closure
|
||||
| required by a bound introduced by this call
|
||||
|
|
||||
= note: expected type `bool`
|
||||
found enum `Option<()>`
|
||||
|
|
25
tests/ui/closures/return-type-doesnt-match-bound.rs
Normal file
|
@ -0,0 +1,25 @@
|
|||
use std::error::Error;
|
||||
use std::process::exit;
|
||||
|
||||
fn foo<F>(f: F) -> ()
|
||||
where
|
||||
F: FnOnce() -> Result<(), Box<dyn Error>>,
|
||||
{
|
||||
f().or_else(|e| -> ! { //~ ERROR to be a closure that returns
|
||||
eprintln!("{:?}", e);
|
||||
exit(1)
|
||||
});
|
||||
}
|
||||
|
||||
fn bar<F>(f: F) -> ()
|
||||
where
|
||||
F: FnOnce() -> Result<(), Box<dyn Error>>,
|
||||
{
|
||||
let c = |e| -> ! { //~ ERROR to be a closure that returns
|
||||
eprintln!("{:?}", e);
|
||||
exit(1)
|
||||
};
|
||||
f().or_else(c);
|
||||
}
|
||||
|
||||
fn main() {}
|
37
tests/ui/closures/return-type-doesnt-match-bound.stderr
Normal file
|
@ -0,0 +1,37 @@
|
|||
error[E0271]: expected `{closure@return-type-doesnt-match-bound.rs:8:17}` to be a closure that returns `Result<(), _>`, but it returns `!`
|
||||
--> $DIR/return-type-doesnt-match-bound.rs:8:24
|
||||
|
|
||||
LL | f().or_else(|e| -> ! {
|
||||
| ------- -------^
|
||||
| | | |
|
||||
| | | expected `Result<(), _>`, found `!`
|
||||
| | this closure
|
||||
| required by a bound introduced by this call
|
||||
|
|
||||
= note: expected enum `Result<(), _>`
|
||||
found type `!`
|
||||
note: required by a bound in `Result::<T, E>::or_else`
|
||||
--> $SRC_DIR/core/src/result.rs:LL:COL
|
||||
|
||||
error[E0271]: expected `{closure@return-type-doesnt-match-bound.rs:18:13}` to be a closure that returns `Result<(), _>`, but it returns `!`
|
||||
--> $DIR/return-type-doesnt-match-bound.rs:18:20
|
||||
|
|
||||
LL | let c = |e| -> ! {
|
||||
| -------^
|
||||
| | |
|
||||
| | expected `Result<(), _>`, found `!`
|
||||
| this closure
|
||||
...
|
||||
LL | f().or_else(c);
|
||||
| ------- - closure used here
|
||||
| |
|
||||
| required by a bound introduced by this call
|
||||
|
|
||||
= note: expected enum `Result<(), _>`
|
||||
found type `!`
|
||||
note: required by a bound in `Result::<T, E>::or_else`
|
||||
--> $SRC_DIR/core/src/result.rs:LL:COL
|
||||
|
||||
error: aborting due to 2 previous errors
|
||||
|
||||
For more information about this error, try `rustc --explain E0271`.
|
|
@ -1,5 +1,5 @@
|
|||
error[E0308]: mismatched types
|
||||
--> $DIR/long-E0308.rs:46:9
|
||||
--> $DIR/long-E0308.rs:48:9
|
||||
|
|
||||
LL | let x: Atype<
|
||||
| _____________-
|
||||
|
@ -20,11 +20,11 @@ LL | | ))))))))))))))))))))))))))))));
|
|||
|
|
||||
= note: expected struct `Atype<Btype<..., ...>, ...>`
|
||||
found enum `Result<Result<..., ...>, ...>`
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.ascii/long-E0308.long-type-hash.txt'
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
= note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/long-E0308.rs:59:26
|
||||
--> $DIR/long-E0308.rs:61:26
|
||||
|
|
||||
LL | ))))))))))))))))) == Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(O...
|
||||
| __________________________^
|
||||
|
@ -36,11 +36,11 @@ LL | | ))))))))))))))))))))))));
|
|||
|
|
||||
= note: expected enum `Option<Result<..., ...>>`
|
||||
found enum `Result<Result<..., ...>, ...>`
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.ascii/long-E0308.long-type-hash.txt'
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
= note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/long-E0308.rs:90:9
|
||||
--> $DIR/long-E0308.rs:92:9
|
||||
|
|
||||
LL | let x: Atype<
|
||||
| ____________-
|
||||
|
@ -56,11 +56,11 @@ LL | | > = ();
|
|||
|
|
||||
= note: expected struct `Atype<Btype<..., ...>, ...>`
|
||||
found unit type `()`
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.ascii/long-E0308.long-type-hash.txt'
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
= note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/long-E0308.rs:93:17
|
||||
--> $DIR/long-E0308.rs:95:17
|
||||
|
|
||||
LL | let _: () = Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(O...
|
||||
| ____________--___^
|
||||
|
@ -74,7 +74,7 @@ LL | | ))))))))))))))))))))))));
|
|||
|
|
||||
= note: expected unit type `()`
|
||||
found enum `Result<Result<..., ...>, ...>`
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.ascii/long-E0308.long-type-hash.txt'
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
= note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error: aborting due to 4 previous errors
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
//@ revisions: ascii unicode
|
||||
//@[ascii] compile-flags: --diagnostic-width=60 -Zwrite-long-types-to-disk=yes
|
||||
//@[unicode] compile-flags: -Zunstable-options --json=diagnostic-unicode --diagnostic-width=60 -Zwrite-long-types-to-disk=yes
|
||||
//@ normalize-stderr: "long-type-\d+" -> "long-type-hash"
|
||||
|
||||
// The regex below normalizes the long type file name to make it suitable for compare-modes.
|
||||
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type-\d+.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type-hash.txt'"
|
||||
|
||||
mod a {
|
||||
// Force the "short path for unique types" machinery to trip up
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error[E0308]: mismatched types
|
||||
╭▸ $DIR/long-E0308.rs:46:9
|
||||
╭▸ $DIR/long-E0308.rs:48:9
|
||||
│
|
||||
LL │ let x: Atype<
|
||||
│ ┌─────────────┘
|
||||
|
@ -20,11 +20,11 @@ LL │ ┃ ))))))))))))))))))))))))))))));
|
|||
│
|
||||
├ note: expected struct `Atype<Btype<..., ...>, ...>`
|
||||
│ found enum `Result<Result<..., ...>, ...>`
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.unicode/long-E0308.long-type-hash.txt'
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
╰ note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error[E0308]: mismatched types
|
||||
╭▸ $DIR/long-E0308.rs:59:26
|
||||
╭▸ $DIR/long-E0308.rs:61:26
|
||||
│
|
||||
LL │ ))))))))))))))))) == Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(…
|
||||
│ ┏━━━━━━━━━━━━━━━━━━━━━━━━━━┛
|
||||
|
@ -36,11 +36,11 @@ LL │ ┃ ))))))))))))))))))))))));
|
|||
│
|
||||
├ note: expected enum `Option<Result<..., ...>>`
|
||||
│ found enum `Result<Result<..., ...>, ...>`
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.unicode/long-E0308.long-type-hash.txt'
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
╰ note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error[E0308]: mismatched types
|
||||
╭▸ $DIR/long-E0308.rs:90:9
|
||||
╭▸ $DIR/long-E0308.rs:92:9
|
||||
│
|
||||
LL │ let x: Atype<
|
||||
│ ┌────────────┘
|
||||
|
@ -56,11 +56,11 @@ LL │ │ > = ();
|
|||
│
|
||||
├ note: expected struct `Atype<Btype<..., ...>, ...>`
|
||||
│ found unit type `()`
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.unicode/long-E0308.long-type-hash.txt'
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
╰ note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error[E0308]: mismatched types
|
||||
╭▸ $DIR/long-E0308.rs:93:17
|
||||
╭▸ $DIR/long-E0308.rs:95:17
|
||||
│
|
||||
LL │ let _: () = Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(Ok(…
|
||||
│ ┏━━━━━━━━━━━━┬─━━━┛
|
||||
|
@ -74,7 +74,7 @@ LL │ ┃ ))))))))))))))))))))))));
|
|||
│
|
||||
├ note: expected unit type `()`
|
||||
│ found enum `Result<Result<..., ...>, ...>`
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/long-E0308.unicode/long-E0308.long-type-hash.txt'
|
||||
├ note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
╰ note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error: aborting due to 4 previous errors
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
//@ compile-flags: --diagnostic-width=60 -Zwrite-long-types-to-disk=yes
|
||||
//@ normalize-stderr: "long-type-\d+" -> "long-type-hash"
|
||||
// The regex below normalizes the long type file name to make it suitable for compare-modes.
|
||||
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type-\d+.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type-hash.txt'"
|
||||
|
||||
type A = (String, String, String, String);
|
||||
type B = (A, A, A, A);
|
||||
type C = (B, B, B, B);
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error[E0382]: use of moved value: `x`
|
||||
--> $DIR/non-copy-type-moved.rs:14:14
|
||||
--> $DIR/non-copy-type-moved.rs:16:14
|
||||
|
|
||||
LL | fn foo(x: D) {
|
||||
| - move occurs because `x` has type `((..., ..., ..., ...), ..., ..., ...)`, which does not implement the `Copy` trait
|
||||
|
@ -8,7 +8,7 @@ LL | let _a = x;
|
|||
LL | let _b = x;
|
||||
| ^ value used here after move
|
||||
|
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/non-copy-type-moved/non-copy-type-moved.long-type-hash.txt'
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
= note: consider using `--verbose` to print the full type name to the console
|
||||
help: consider cloning the value if the performance cost is acceptable
|
||||
|
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
//@ compile-flags: --diagnostic-width=100 -Zwrite-long-types-to-disk=yes
|
||||
//@ normalize-stderr: "long-type-\d+" -> "long-type-hash"
|
||||
// The regex below normalizes the long type file name to make it suitable for compare-modes.
|
||||
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type-\d+.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type-hash.txt'"
|
||||
|
||||
type A = (i32, i32, i32, i32);
|
||||
type B = (A, A, A, A);
|
||||
type C = (B, B, B, B);
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error[E0308]: mismatched types
|
||||
--> $DIR/secondary-label-with-long-type.rs:9:9
|
||||
--> $DIR/secondary-label-with-long-type.rs:11:9
|
||||
|
|
||||
LL | let () = x;
|
||||
| ^^ - this expression has type `((..., ..., ..., ...), ..., ..., ...)`
|
||||
|
@ -8,7 +8,7 @@ LL | let () = x;
|
|||
|
|
||||
= note: expected tuple `((..., ..., ..., ...), ..., ..., ...)`
|
||||
found unit type `()`
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/diagnostic-width/secondary-label-with-long-type/secondary-label-with-long-type.long-type-hash.txt'
|
||||
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
|
||||
= note: consider using `--verbose` to print the full type name to the console
|
||||
|
||||
error: aborting due to 1 previous error
|
||||
|
|
|
@ -36,12 +36,10 @@ trait Ty<'a> {
|
|||
|
||||
fn main() {
|
||||
let v = Unit2.m(
|
||||
L {
|
||||
//~^ ERROR to be a closure that returns `Unit3`, but it returns `Unit4`
|
||||
//~| ERROR type mismatch
|
||||
L { //~ ERROR type mismatch
|
||||
f: |x| {
|
||||
drop(x);
|
||||
Unit4
|
||||
Unit4 //~ ERROR to be a closure that returns `Unit3`, but it returns `Unit4`
|
||||
},
|
||||
},
|
||||
);
|
||||
|
|
|
@ -1,16 +1,15 @@
|
|||
error[E0271]: type mismatch resolving `<L<{closure@issue-62203-hrtb-ice.rs:42:16}> as T0<'r, (&u8,)>>::O == <_ as Ty<'r>>::V`
|
||||
error[E0271]: type mismatch resolving `<L<{closure@issue-62203-hrtb-ice.rs:40:16}> as T0<'r, (&u8,)>>::O == <_ as Ty<'r>>::V`
|
||||
--> $DIR/issue-62203-hrtb-ice.rs:39:9
|
||||
|
|
||||
LL | let v = Unit2.m(
|
||||
| - required by a bound introduced by this call
|
||||
LL | / L {
|
||||
LL | |
|
||||
LL | |
|
||||
LL | | f: |x| {
|
||||
... |
|
||||
LL | | drop(x);
|
||||
LL | | Unit4
|
||||
LL | | },
|
||||
LL | | },
|
||||
| |_________^ type mismatch resolving `<L<{closure@issue-62203-hrtb-ice.rs:42:16}> as T0<'r, (&u8,)>>::O == <_ as Ty<'r>>::V`
|
||||
| |_________^ type mismatch resolving `<L<{closure@issue-62203-hrtb-ice.rs:40:16}> as T0<'r, (&u8,)>>::O == <_ as Ty<'r>>::V`
|
||||
|
|
||||
note: expected this to be `<_ as Ty<'_>>::V`
|
||||
--> $DIR/issue-62203-hrtb-ice.rs:21:14
|
||||
|
@ -30,21 +29,19 @@ LL | where
|
|||
LL | F: for<'r> T0<'r, (<Self as Ty<'r>>::V,), O = <B as Ty<'r>>::V>,
|
||||
| ^^^^^^^^^^^^^^^^^^^^ required by this bound in `T1::m`
|
||||
|
||||
error[E0271]: expected `{closure@issue-62203-hrtb-ice.rs:42:16}` to be a closure that returns `Unit3`, but it returns `Unit4`
|
||||
--> $DIR/issue-62203-hrtb-ice.rs:39:9
|
||||
error[E0271]: expected `{closure@issue-62203-hrtb-ice.rs:40:16}` to be a closure that returns `Unit3`, but it returns `Unit4`
|
||||
--> $DIR/issue-62203-hrtb-ice.rs:42:17
|
||||
|
|
||||
LL | let v = Unit2.m(
|
||||
| - required by a bound introduced by this call
|
||||
LL | / L {
|
||||
LL | |
|
||||
LL | |
|
||||
LL | | f: |x| {
|
||||
... |
|
||||
LL | | },
|
||||
LL | | },
|
||||
| |_________^ expected `Unit3`, found `Unit4`
|
||||
LL | L {
|
||||
LL | f: |x| {
|
||||
| --- this closure
|
||||
LL | drop(x);
|
||||
LL | Unit4
|
||||
| ^^^^^ expected `Unit3`, found `Unit4`
|
||||
|
|
||||
note: required for `L<{closure@$DIR/issue-62203-hrtb-ice.rs:42:16: 42:19}>` to implement `for<'r> T0<'r, (&'r u8,)>`
|
||||
note: required for `L<{closure@$DIR/issue-62203-hrtb-ice.rs:40:16: 40:19}>` to implement `for<'r> T0<'r, (&'r u8,)>`
|
||||
--> $DIR/issue-62203-hrtb-ice.rs:17:16
|
||||
|
|
||||
LL | impl<'a, A, T> T0<'a, A> for L<T>
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
// There's a suggestion that turns `Iterator<u32>` into `Iterator<Item = u32>`
|
||||
// if we have more generics than the trait wants. Let's not consider RPITITs
|
||||
// for this, since that makes no sense right now.
|
||||
|
||||
trait Foo {
|
||||
fn bar(self) -> impl Sized;
|
||||
}
|
||||
|
||||
impl Foo<u8> for () {
|
||||
//~^ ERROR trait takes 0 generic arguments but 1 generic argument was supplied
|
||||
fn bar(self) -> impl Sized {}
|
||||
}
|
||||
|
||||
fn main() {}
|
|
@ -0,0 +1,17 @@
|
|||
error[E0107]: trait takes 0 generic arguments but 1 generic argument was supplied
|
||||
--> $DIR/dont-consider-unconstrained-rpitits.rs:9:6
|
||||
|
|
||||
LL | impl Foo<u8> for () {
|
||||
| ^^^---- help: remove the unnecessary generics
|
||||
| |
|
||||
| expected 0 generic arguments
|
||||
|
|
||||
note: trait defined here, with 0 generic parameters
|
||||
--> $DIR/dont-consider-unconstrained-rpitits.rs:5:7
|
||||
|
|
||||
LL | trait Foo {
|
||||
| ^^^
|
||||
|
||||
error: aborting due to 1 previous error
|
||||
|
||||
For more information about this error, try `rustc --explain E0107`.
|
|
@ -3,6 +3,9 @@
|
|||
//@ error-pattern: reached the recursion limit while instantiating
|
||||
//@ error-pattern: reached the recursion limit finding the struct tail
|
||||
|
||||
// The regex below normalizes the long type file name to make it suitable for compare-modes.
|
||||
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'"
|
||||
|
||||
// Regression test for #114484: This used to ICE during monomorphization, because we treated
|
||||
// `<VirtualWrapper<...> as Pointee>::Metadata` as a rigid projection after reaching the recursion
|
||||
// limit when finding the struct tail.
|
||||
|
|
|
@ -18,7 +18,7 @@ error: reached the recursion limit finding the struct tail for `[u8; 256]`
|
|||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
note: the above error was encountered while instantiating `fn virtualize_my_trait::<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<SomeData<256>, 0>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>>`
|
||||
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:26:18
|
||||
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:29:18
|
||||
|
|
||||
LL | unsafe { virtualize_my_trait(L, self) }
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
@ -43,7 +43,7 @@ error: reached the recursion limit finding the struct tail for `SomeData<256>`
|
|||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
note: the above error was encountered while instantiating `fn virtualize_my_trait::<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<SomeData<256>, 0>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>>`
|
||||
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:26:18
|
||||
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:29:18
|
||||
|
|
||||
LL | unsafe { virtualize_my_trait(L, self) }
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
@ -68,7 +68,7 @@ error: reached the recursion limit finding the struct tail for `VirtualWrapper<S
|
|||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
note: the above error was encountered while instantiating `fn virtualize_my_trait::<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<SomeData<256>, 0>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>, 1>>`
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:26:18
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:29:18
|
LL | unsafe { virtualize_my_trait(L, self) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@ -76,11 +76,11 @@ LL | unsafe { virtualize_my_trait(L, self) }
error: reached the recursion limit while instantiating `<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<VirtualWrapper<..., 1>, 1>, 1>, 1>, 1> as MyTrait>::virtualize`
|
note: `<VirtualWrapper<T, L> as MyTrait>::virtualize` defined here
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:25:5
--> $DIR/infinite-instantiation-struct-tail-ice-114484.rs:28:5
|
LL | fn virtualize(&self) -> &dyn MyTrait {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: the full type name has been written to '$TEST_BUILD_DIR/infinite/infinite-instantiation-struct-tail-ice-114484/infinite-instantiation-struct-tail-ice-114484.long-type.txt'
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type.txt'

error: aborting due to 13 previous errors
@ -1,5 +1,6 @@
//@ build-fail
//@ normalize-stderr: ".nll/" -> "/"
// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'"

trait ToOpt: Sized {
    fn to_option(&self) -> Option<Self>;
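For readers unfamiliar with compiletest headers: the `normalize-stderr` directive added above is a plain regex substitution applied to the compiler's stderr before it is compared against the checked-in `.stderr` snapshot, which is how the machine-specific `*.long-type.txt` path collapses to a stable `$TEST_BUILD_DIR/$FILE.long-type.txt` placeholder under compare modes. A minimal standalone sketch of that substitution, assuming the `regex` crate (an illustration only, not compiletest's own code; the sample input line is copied from the `.stderr` hunk that follows):

// Requires `regex` as a dependency; not part of this PR.
use regex::Regex;

fn main() {
    let stderr_line = "= note: the full type name has been written to \
'$TEST_BUILD_DIR/infinite/infinite-instantiation/infinite-instantiation.long-type.txt'";
    // Same pattern as the directive; `$$` in the replacement is an escaped
    // literal `$`, exactly as it is in the test header itself.
    let re = Regex::new(r"'\$TEST_BUILD_DIR/.*\.long-type\.txt'").unwrap();
    let normalized = re.replace_all(stderr_line, "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'");
    assert!(normalized.ends_with("'$TEST_BUILD_DIR/$FILE.long-type.txt'"));
    println!("{normalized}");
}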
@ -1,15 +1,15 @@
error: reached the recursion limit while instantiating `function::<Option<Option<Option<Option<Option<...>>>>>>`
--> $DIR/infinite-instantiation.rs:22:9
--> $DIR/infinite-instantiation.rs:23:9
|
LL | function(counter - 1, t.to_option());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: `function` defined here
--> $DIR/infinite-instantiation.rs:20:1
--> $DIR/infinite-instantiation.rs:21:1
|
LL | fn function<T:ToOpt + Clone>(counter: usize, t: T) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: the full type name has been written to '$TEST_BUILD_DIR/infinite/infinite-instantiation/infinite-instantiation.long-type.txt'
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type.txt'

error: aborting due to 1 previous error
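All of the "reached the recursion limit while instantiating" snapshots in this rollup come from the same shape of code: a generic function that calls itself at a strictly larger type, so monomorphization can never terminate. A minimal analogous sketch (my own toy example, not the test's code; building it is expected to fail with exactly this error, since the runtime `counter` check cannot stop the compile-time instantiation chain):

// Each recursive call wraps the argument in one more `Option`, so codegen
// would need f::<()>, f::<Option<()>>, f::<Option<Option<()>>>, ... forever.
fn f<T>(counter: usize, t: T) {
    if counter > 0 {
        f(counter - 1, Some(t));
    }
}

fn main() {
    f(30, ());
}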
@ -1,5 +1,6 @@
//@ build-fail
//@ normalize-stderr: ".nll/" -> "/"
// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'"

trait Mirror {
    type Image;
@ -1,15 +1,15 @@
error: reached the recursion limit while instantiating `<(&(&(..., ...), ...), ...) as Foo>::recurse`
--> $DIR/issue-37311.rs:17:9
--> $DIR/issue-37311.rs:18:9
|
LL | (self, self).recurse();
| ^^^^^^^^^^^^^^^^^^^^^^
|
note: `<T as Foo>::recurse` defined here
--> $DIR/issue-37311.rs:16:5
--> $DIR/issue-37311.rs:17:5
|
LL | fn recurse(&self) {
| ^^^^^^^^^^^^^^^^^
= note: the full type name has been written to '$TEST_BUILD_DIR/issues/issue-37311-type-length-limit/issue-37311/issue-37311.long-type.txt'
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type.txt'

error: aborting due to 1 previous error
@ -1,6 +1,7 @@
//@ build-fail
//@ compile-flags: -Copt-level=0
//@ normalize-stderr: ".nll/" -> "/"
// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'"

fn main() {
    rec(Empty);
@ -1,17 +1,17 @@
error: reached the recursion limit while instantiating `rec::<&mut &mut &mut &mut &mut ...>`
--> $DIR/issue-67552.rs:29:9
--> $DIR/issue-67552.rs:30:9
|
LL | rec(identity(&mut it))
| ^^^^^^^^^^^^^^^^^^^^^^
|
note: `rec` defined here
--> $DIR/issue-67552.rs:22:1
--> $DIR/issue-67552.rs:23:1
|
LL | / fn rec<T>(mut it: T)
LL | | where
LL | | T: Iterator,
| |________________^
= note: the full type name has been written to '$TEST_BUILD_DIR/issues/issue-67552/issue-67552.long-type.txt'
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type.txt'

error: aborting due to 1 previous error
@ -2,7 +2,8 @@
// recursions.

//@ build-fail
//@ normalize-stderr: ".nll/" -> "/"
// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'"

fn generic<T>() { //~ WARN function cannot return without recursing
    generic::<Option<T>>();
@ -1,5 +1,5 @@
warning: function cannot return without recursing
--> $DIR/issue-8727.rs:7:1
--> $DIR/issue-8727.rs:8:1
|
LL | fn generic<T>() {
| ^^^^^^^^^^^^^^^ cannot return without recursing
@ -10,17 +10,17 @@ LL | generic::<Option<T>>();
= note: `#[warn(unconditional_recursion)]` on by default

error: reached the recursion limit while instantiating `generic::<Option<Option<Option<Option<Option<...>>>>>>`
--> $DIR/issue-8727.rs:8:5
--> $DIR/issue-8727.rs:9:5
|
LL | generic::<Option<T>>();
| ^^^^^^^^^^^^^^^^^^^^^^
|
note: `generic` defined here
--> $DIR/issue-8727.rs:7:1
--> $DIR/issue-8727.rs:8:1
|
LL | fn generic<T>() {
| ^^^^^^^^^^^^^^^
= note: the full type name has been written to '$TEST_BUILD_DIR/issues/issue-8727/issue-8727.long-type.txt'
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type.txt'

error: aborting due to 1 previous error; 1 warning emitted
@ -1,16 +1,15 @@
error[E0271]: expected `{closure@fallback-closure-wrap.rs:18:40}` to be a closure that returns `()`, but it returns `!`
--> $DIR/fallback-closure-wrap.rs:18:31
--> $DIR/fallback-closure-wrap.rs:19:9
|
LL | let error = Closure::wrap(Box::new(move || {
| _______________________________^
LL | |
LL | | panic!("Can't connect to server.");
LL | | }) as Box<dyn FnMut()>);
| |______^ expected `()`, found `!`
| ------- this closure
LL | panic!("Can't connect to server.");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `()`, found `!`
|
= note: expected unit type `()`
found type `!`
= note: required for the cast from `Box<{closure@$DIR/fallback-closure-wrap.rs:18:40: 18:47}>` to `Box<dyn FnMut()>`
= note: this error originates in the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)

error: aborting due to 1 previous error
@ -16,8 +16,8 @@ use std::marker::PhantomData;

fn main() {
    let error = Closure::wrap(Box::new(move || {
        //[fallback]~^ to be a closure that returns `()`, but it returns `!`
        panic!("Can't connect to server.");
        //[fallback]~^ to be a closure that returns `()`, but it returns `!`
    }) as Box<dyn FnMut()>);
}
@ -1,6 +1,7 @@
//@ build-fail
//@ compile-flags:-C overflow-checks=off
//@ normalize-stderr: ".nll/" -> "/"
// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'"

enum Nil {NilValue}
struct Cons<T> {head:isize, tail:T}
@ -1,15 +1,15 @@
error: reached the recursion limit while instantiating `test::<Cons<Cons<Cons<Cons<Cons<...>>>>>>`
--> $DIR/recursion.rs:18:11
--> $DIR/recursion.rs:19:11
|
LL | _ => {test (n-1, i+1, Cons {head:2*i+1, tail:first}, Cons{head:i*i, tail:second})}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: `test` defined here
--> $DIR/recursion.rs:16:1
--> $DIR/recursion.rs:17:1
|
LL | fn test<T:Dot> (n:isize, i:isize, first:T, second:T) ->isize {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: the full type name has been written to '$TEST_BUILD_DIR/recursion/recursion/recursion.long-type.txt'
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type.txt'

error: aborting due to 1 previous error
@ -1,5 +1,6 @@
//@ compile-flags: --diagnostic-width=60 -Z write-long-types-to-disk=yes
//@ normalize-stderr: "long-type-\d+" -> "long-type-hash"
// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type-\d+.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type-hash.txt'"

pub fn foo() -> impl std::fmt::Display {
    //~^ ERROR doesn't implement `std::fmt::Display`
@ -1,5 +1,5 @@
error[E0277]: `Option<Option<Option<...>>>` doesn't implement `std::fmt::Display`
--> $DIR/on_unimplemented_long_types.rs:4:17
--> $DIR/on_unimplemented_long_types.rs:5:17
|
LL | pub fn foo() -> impl std::fmt::Display {
| ^^^^^^^^^^^^^^^^^^^^^^ `Option<Option<Option<...>>>` cannot be formatted with the default formatter
@ -13,11 +13,11 @@ LL | | ))))))))))),
LL | | )))))))))))
| |_______________- return type was inferred to be `Option<Option<Option<...>>>` here
|
= note: the full name for the type has been written to '$TEST_BUILD_DIR/traits/on_unimplemented_long_types/on_unimplemented_long_types.long-type-hash.txt'
= note: the full name for the type has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
= note: consider using `--verbose` to print the full type name to the console
= help: the trait `std::fmt::Display` is not implemented for `Option<Option<Option<...>>>`
= note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead
= note: the full name for the type has been written to '$TEST_BUILD_DIR/traits/on_unimplemented_long_types/on_unimplemented_long_types.long-type-hash.txt'
= note: the full name for the type has been written to '$TEST_BUILD_DIR/$FILE.long-type-hash.txt'
= note: consider using `--verbose` to print the full type name to the console

error: aborting due to 1 previous error
@ -2,6 +2,9 @@
//@ compile-flags: -Copt-level=0 -Zenforce-type-length-limit
//~^^ ERROR reached the type-length limit

// The regex below normalizes the long type file name to make it suitable for compare-modes.
//@ normalize-stderr: "'\$TEST_BUILD_DIR/.*\.long-type.txt'" -> "'$$TEST_BUILD_DIR/$$FILE.long-type.txt'"

// Test that the type length limit can be changed.
// The exact type depends on optimizations, so disable them.
@ -1,11 +1,11 @@
error: reached the type-length limit while instantiating `std::mem::drop::<Option<((((..., ..., ...), ..., ...), ..., ...), ..., ...)>>`
--> $DIR/type_length_limit.rs:32:5
--> $DIR/type_length_limit.rs:35:5
|
LL | drop::<Option<A>>(None);
| ^^^^^^^^^^^^^^^^^^^^^^^
|
= help: consider adding a `#![type_length_limit="4010"]` attribute to your crate
= note: the full type name has been written to '$TEST_BUILD_DIR/type_length_limit/type_length_limit.long-type.txt'
= note: the full type name has been written to '$TEST_BUILD_DIR/$FILE.long-type.txt'

error: reached the type-length limit while instantiating `<{closure@rt::lang_start<()>::{closure#0}} as FnMut<()>>::call_mut`
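The `= help:` line in this snapshot points at the crate-level knob for the type-length limit. A minimal sketch of where that attribute lives (the value is simply the one the help text happens to suggest here; as the test's compile-flags show, current compilers only enforce the limit under `-Zenforce-type-length-limit`):

// Crate root, e.g. src/main.rs; pick whatever value your own error suggests.
#![type_length_limit = "4010"]

fn main() {
    // ... code whose monomorphized types would otherwise exceed the limit ...
}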