
Auto merge of #139012 - Zalathar:rollup-qgt5yfo, r=Zalathar

Rollup of 10 pull requests

Successful merges:

 - #130883 (Add environment variable query)
 - #138624 (Add mipsel maintainer)
 - #138672 (Avoiding calling queries when collecting active queries)
 - #138935 (Update wg-prio triagebot config)
 - #138946 (Un-bury chapters from the chapter list in rustc book)
 - #138964 (Implement lint against using Interner and InferCtxtLike in random compiler crates)
 - #138977 (Don't deaggregate InvocationParent just to reaggregate it again)
 - #138980 (Collect items referenced from var_debug_info)
 - #138985 (Use the correct binder scope for elided lifetimes in assoc consts)
 - #138987 (Always emit `native-static-libs` note, even if it is empty)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2025-03-27 07:47:39 +00:00
commit ecb170afc8
46 changed files with 733 additions and 311 deletions

View file

@@ -1,9 +1,9 @@
 //! The entry point of the NLL borrow checker.
+use std::io;
 use std::path::PathBuf;
 use std::rc::Rc;
 use std::str::FromStr;
-use std::{env, io};
 use polonius_engine::{Algorithm, Output};
 use rustc_index::IndexSlice;
@@ -162,9 +162,8 @@ pub(crate) fn compute_regions<'a, 'tcx>(
 }
 if polonius_output {
-let algorithm =
-env::var("POLONIUS_ALGORITHM").unwrap_or_else(|_| String::from("Hybrid"));
-let algorithm = Algorithm::from_str(&algorithm).unwrap();
+let algorithm = infcx.tcx.env_var("POLONIUS_ALGORITHM").unwrap_or("Hybrid");
+let algorithm = Algorithm::from_str(algorithm).unwrap();
 debug!("compute_regions: using polonius algorithm {:?}", algorithm);
 let _prof_timer = infcx.tcx.prof.generic_activity("polonius_analysis");
 Some(Box::new(Output::compute(polonius_facts, algorithm, false)))

View file

@@ -1560,17 +1560,13 @@ fn print_native_static_libs(
 match out {
 OutFileName::Real(path) => {
 out.overwrite(&lib_args.join(" "), sess);
-if !lib_args.is_empty() {
-sess.dcx().emit_note(errors::StaticLibraryNativeArtifactsToFile { path });
-}
+sess.dcx().emit_note(errors::StaticLibraryNativeArtifactsToFile { path });
 }
 OutFileName::Stdout => {
-if !lib_args.is_empty() {
-sess.dcx().emit_note(errors::StaticLibraryNativeArtifacts);
-// Prefix for greppability
-// Note: This must not be translated as tools are allowed to depend on this exact string.
-sess.dcx().note(format!("native-static-libs: {}", lib_args.join(" ")));
-}
+sess.dcx().emit_note(errors::StaticLibraryNativeArtifacts);
+// Prefix for greppability
+// Note: This must not be translated as tools are allowed to depend on this exact string.
+sess.dcx().note(format!("native-static-libs: {}", lib_args.join(" ")));
 }
 }
 }

View file

@@ -564,6 +564,8 @@ where
 }
 }
+impl_stable_traits_for_trivial_type!(::std::ffi::OsStr);
 impl_stable_traits_for_trivial_type!(::std::path::Path);
 impl_stable_traits_for_trivial_type!(::std::path::PathBuf);

View file

@@ -1,5 +1,5 @@
 use std::any::Any;
-use std::ffi::OsString;
+use std::ffi::{OsStr, OsString};
 use std::io::{self, BufWriter, Write};
 use std::path::{Path, PathBuf};
 use std::sync::{Arc, LazyLock, OnceLock};
@@ -361,6 +361,31 @@ fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) {
 )
 }
+fn env_var_os<'tcx>(tcx: TyCtxt<'tcx>, key: &'tcx OsStr) -> Option<&'tcx OsStr> {
+let value = env::var_os(key);
+let value_tcx = value.as_ref().map(|value| {
+let encoded_bytes = tcx.arena.alloc_slice(value.as_encoded_bytes());
+debug_assert_eq!(value.as_encoded_bytes(), encoded_bytes);
+// SAFETY: The bytes came from `as_encoded_bytes`, and we assume that
+// `alloc_slice` is implemented correctly, and passes the same bytes
+// back (debug asserted above).
+unsafe { OsStr::from_encoded_bytes_unchecked(encoded_bytes) }
+});
+// Also add the variable to Cargo's dependency tracking
+//
+// NOTE: This only works for passes run before `write_dep_info`. See that
+// for extension points for configuring environment variables to be
+// properly change-tracked.
+tcx.sess.psess.env_depinfo.borrow_mut().insert((
+Symbol::intern(&key.to_string_lossy()),
+value.as_ref().and_then(|value| value.to_str()).map(|value| Symbol::intern(&value)),
+));
+value_tcx
+}
 // Returns all the paths that correspond to generated files.
 fn generated_output_paths(
 tcx: TyCtxt<'_>,
@@ -725,6 +750,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
 |tcx, _| tcx.arena.alloc_from_iter(tcx.resolutions(()).stripped_cfg_items.steal());
 providers.resolutions = |tcx, ()| tcx.resolver_for_lowering_raw(()).1;
 providers.early_lint_checks = early_lint_checks;
+providers.env_var_os = env_var_os;
 limits::provide(providers);
 proc_macro_decls::provide(providers);
 rustc_const_eval::provide(providers);

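The provider above backs the new `env_var_os` query from #130883. Below is a minimal sketch, not part of the commit, of how compiler-internal code is expected to consume it; the borrowck and lint changes later in this diff follow the same pattern. The function name is hypothetical and the snippet assumes a crate that already depends on `rustc_middle`:

    use rustc_middle::ty::TyCtxt;

    // Reads the variable through the tracked query instead of `std::env::var`,
    // so the access is recorded in `env_depinfo` and a rebuild is triggered
    // when the variable changes.
    fn polonius_algorithm_name<'tcx>(tcx: TyCtxt<'tcx>) -> &'tcx str {
        tcx.env_var("POLONIUS_ALGORITHM").unwrap_or("Hybrid")
    }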
View file

@@ -18,7 +18,7 @@ use rustc_session::{EarlyDiagCtxt, Session, filesearch};
 use rustc_span::edit_distance::find_best_match_for_name;
 use rustc_span::edition::Edition;
 use rustc_span::source_map::SourceMapInputs;
-use rustc_span::{Symbol, sym};
+use rustc_span::{SessionGlobals, Symbol, sym};
 use rustc_target::spec::Target;
 use tracing::info;
@@ -188,26 +188,11 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
 // On deadlock, creates a new thread and forwards information in thread
 // locals to it. The new thread runs the deadlock handler.
-// Get a `GlobalCtxt` reference from `CurrentGcx` as we cannot rely on having a
-// `TyCtxt` TLS reference here.
-let query_map = current_gcx2.access(|gcx| {
-tls::enter_context(&tls::ImplicitCtxt::new(gcx), || {
-tls::with(|tcx| {
-match QueryCtxt::new(tcx).collect_active_jobs() {
-Ok(query_map) => query_map,
-Err(_) => {
-// There was an unexpected error collecting all active jobs, which we need
-// to find cycles to break.
-// We want to avoid panicking in the deadlock handler, so we abort instead.
-eprintln!("internal compiler error: failed to get query map in deadlock handler, aborting process");
-process::abort();
-}
-}
-})
-})
-});
-let query_map = FromDyn::from(query_map);
+let current_gcx2 = current_gcx2.clone();
 let registry = rayon_core::Registry::current();
+let session_globals = rustc_span::with_session_globals(|session_globals| {
+session_globals as *const SessionGlobals as usize
+});
 thread::Builder::new()
 .name("rustc query cycle handler".to_string())
 .spawn(move || {
@@ -217,7 +202,24 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
 // otherwise the compiler could just hang,
 process::abort();
 });
-break_query_cycles(query_map.into_inner(), &registry);
+// Get a `GlobalCtxt` reference from `CurrentGcx` as we cannot rely on having a
+// `TyCtxt` TLS reference here.
+current_gcx2.access(|gcx| {
+tls::enter_context(&tls::ImplicitCtxt::new(gcx), || {
+tls::with(|tcx| {
+// Accessing session globals is sound as they outlive `GlobalCtxt`.
+// They are needed to hash query keys containing spans or symbols.
+let query_map = rustc_span::set_session_globals_then(unsafe { &*(session_globals as *const SessionGlobals) }, || {
+// Ensure there was no errors collecting all active jobs.
+// We need the complete map to ensure we find a cycle to break.
+QueryCtxt::new(tcx).collect_active_jobs().ok().expect("failed to collect active queries in deadlock handler")
+});
+break_query_cycles(query_map, &registry);
+})
+})
+});
 on_panic.disable();
 })
 .unwrap();

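The reworked handler above defers `collect_active_jobs()` to the deadlock-handler thread and smuggles the `SessionGlobals` address across the thread boundary as a `usize`, because a raw pointer is not `Send`. The following is a standalone sketch of that pattern using only `std` (the `Globals` struct is made up for illustration; a scoped thread stands in for the liveness guarantee that, in the compiler, comes from `SessionGlobals` outliving the `GlobalCtxt`):

    use std::thread;

    struct Globals {
        value: u32,
    }

    fn main() {
        let globals = Globals { value: 42 };
        // A `*const Globals` is not `Send`, but its address as `usize` is,
        // so it can be captured by a closure running on another thread.
        let addr = &globals as *const Globals as usize;
        thread::scope(|s| {
            s.spawn(move || {
                // SAFETY: `globals` outlives the scope, so the address is still valid.
                let g = unsafe { &*(addr as *const Globals) };
                println!("handler thread sees {}", g.value);
            });
        });
    }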
View file

@@ -799,6 +799,9 @@ lint_tykind_kind = usage of `ty::TyKind::<kind>`
 lint_type_ir_inherent_usage = do not use `rustc_type_ir::inherent` unless you're inside of the trait solver
 .note = the method or struct you're looking for is likely defined somewhere else downstream in the compiler
+lint_type_ir_trait_usage = do not use `rustc_type_ir::Interner` or `rustc_type_ir::InferCtxtLike` unless you're inside of the trait solver
+.note = the method or struct you're looking for is likely defined somewhere else downstream in the compiler
 lint_undropped_manually_drops = calls to `std::mem::drop` with `std::mem::ManuallyDrop` instead of the inner value does nothing
 .label = argument has type `{$arg_ty}`
 .suggestion = use `std::mem::ManuallyDrop::into_inner` to get the inner value

View file

@@ -1,24 +1,21 @@
 //! Some lints that are only useful in the compiler or crates that use compiler internals, such as
 //! Clippy.
-use rustc_ast as ast;
+use rustc_hir::HirId;
 use rustc_hir::def::Res;
 use rustc_hir::def_id::DefId;
-use rustc_hir::{
-AmbigArg, BinOp, BinOpKind, Expr, ExprKind, GenericArg, HirId, Impl, Item, ItemKind, Node, Pat,
-PatExpr, PatExprKind, PatKind, Path, PathSegment, QPath, Ty, TyKind,
-};
 use rustc_middle::ty::{self, GenericArgsRef, Ty as MiddleTy};
 use rustc_session::{declare_lint_pass, declare_tool_lint};
 use rustc_span::hygiene::{ExpnKind, MacroKind};
 use rustc_span::{Span, sym};
 use tracing::debug;
+use {rustc_ast as ast, rustc_hir as hir};
 use crate::lints::{
 BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand,
 NonGlobImportTypeIrInherent, QueryInstability, QueryUntracked, SpanUseEqCtxtDiag,
 SymbolInternStringLiteralDiag, TyQualified, TykindDiag, TykindKind, TypeIrInherentUsage,
-UntranslatableDiag,
+TypeIrTraitUsage, UntranslatableDiag,
 };
 use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
@@ -37,9 +34,12 @@ declare_tool_lint! {
 declare_lint_pass!(DefaultHashTypes => [DEFAULT_HASH_TYPES]);
 impl LateLintPass<'_> for DefaultHashTypes {
-fn check_path(&mut self, cx: &LateContext<'_>, path: &Path<'_>, hir_id: HirId) {
+fn check_path(&mut self, cx: &LateContext<'_>, path: &hir::Path<'_>, hir_id: HirId) {
 let Res::Def(rustc_hir::def::DefKind::Struct, def_id) = path.res else { return };
-if matches!(cx.tcx.hir_node(hir_id), Node::Item(Item { kind: ItemKind::Use(..), .. })) {
+if matches!(
+cx.tcx.hir_node(hir_id),
+hir::Node::Item(hir::Item { kind: hir::ItemKind::Use(..), .. })
+) {
 // Don't lint imports, only actual usages.
 return;
 }
@@ -60,10 +60,10 @@ impl LateLintPass<'_> for DefaultHashTypes {
 /// get the `DefId` and `GenericArgsRef` of the function.
 fn typeck_results_of_method_fn<'tcx>(
 cx: &LateContext<'tcx>,
-expr: &Expr<'_>,
+expr: &hir::Expr<'_>,
 ) -> Option<(Span, DefId, ty::GenericArgsRef<'tcx>)> {
 match expr.kind {
-ExprKind::MethodCall(segment, ..)
+hir::ExprKind::MethodCall(segment, ..)
 if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) =>
 {
 Some((segment.ident.span, def_id, cx.typeck_results().node_args(expr.hir_id)))
@@ -102,7 +102,7 @@ declare_tool_lint! {
 declare_lint_pass!(QueryStability => [POTENTIAL_QUERY_INSTABILITY, UNTRACKED_QUERY_INFORMATION]);
 impl LateLintPass<'_> for QueryStability {
-fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
+fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) {
 let Some((span, def_id, args)) = typeck_results_of_method_fn(cx, expr) else { return };
 if let Ok(Some(instance)) = ty::Instance::try_resolve(cx.tcx, cx.typing_env(), def_id, args)
 {
@@ -164,21 +164,25 @@ impl<'tcx> LateLintPass<'tcx> for TyTyKind {
 }
 }
-fn check_ty(&mut self, cx: &LateContext<'_>, ty: &'tcx Ty<'tcx, AmbigArg>) {
+fn check_ty(&mut self, cx: &LateContext<'_>, ty: &'tcx hir::Ty<'tcx, hir::AmbigArg>) {
 match &ty.kind {
-TyKind::Path(QPath::Resolved(_, path)) => {
+hir::TyKind::Path(hir::QPath::Resolved(_, path)) => {
 if lint_ty_kind_usage(cx, &path.res) {
 let span = match cx.tcx.parent_hir_node(ty.hir_id) {
-Node::PatExpr(PatExpr { kind: PatExprKind::Path(qpath), .. })
-| Node::Pat(Pat {
-kind: PatKind::TupleStruct(qpath, ..) | PatKind::Struct(qpath, ..),
+hir::Node::PatExpr(hir::PatExpr {
+kind: hir::PatExprKind::Path(qpath),
 ..
 })
-| Node::Expr(
-Expr { kind: ExprKind::Path(qpath), .. }
-| &Expr { kind: ExprKind::Struct(qpath, ..), .. },
+| hir::Node::Pat(hir::Pat {
+kind:
+hir::PatKind::TupleStruct(qpath, ..) | hir::PatKind::Struct(qpath, ..),
+..
+})
+| hir::Node::Expr(
+hir::Expr { kind: hir::ExprKind::Path(qpath), .. }
+| &hir::Expr { kind: hir::ExprKind::Struct(qpath, ..), .. },
 ) => {
-if let QPath::TypeRelative(qpath_ty, ..) = qpath
+if let hir::QPath::TypeRelative(qpath_ty, ..) = qpath
 && qpath_ty.hir_id == ty.hir_id
 {
 Some(path.span)
@@ -223,7 +227,7 @@ fn lint_ty_kind_usage(cx: &LateContext<'_>, res: &Res) -> bool {
 }
 }
-fn is_ty_or_ty_ctxt(cx: &LateContext<'_>, path: &Path<'_>) -> Option<String> {
+fn is_ty_or_ty_ctxt(cx: &LateContext<'_>, path: &hir::Path<'_>) -> Option<String> {
 match &path.res {
 Res::Def(_, def_id) => {
 if let Some(name @ (sym::Ty | sym::TyCtxt)) = cx.tcx.get_diagnostic_name(*def_id) {
@@ -244,13 +248,17 @@ fn is_ty_or_ty_ctxt(cx: &LateContext<'_>, path: &Path<'_>) -> Option<String> {
 None
 }
-fn gen_args(segment: &PathSegment<'_>) -> String {
+fn gen_args(segment: &hir::PathSegment<'_>) -> String {
 if let Some(args) = &segment.args {
 let lifetimes = args
 .args
 .iter()
 .filter_map(|arg| {
-if let GenericArg::Lifetime(lt) = arg { Some(lt.ident.to_string()) } else { None }
+if let hir::GenericArg::Lifetime(lt) = arg {
+Some(lt.ident.to_string())
+} else {
+None
+}
 })
 .collect::<Vec<_>>();
@@ -272,7 +280,7 @@ declare_tool_lint! {
 }
 declare_tool_lint! {
-/// The `usage_of_type_ir_inherent` lint detects usage `rustc_type_ir::inherent`.
+/// The `usage_of_type_ir_inherent` lint detects usage of `rustc_type_ir::inherent`.
 ///
 /// This module should only be used within the trait solver.
 pub rustc::USAGE_OF_TYPE_IR_INHERENT,
@@ -281,10 +289,43 @@ declare_tool_lint! {
 report_in_external_macro: true
 }
-declare_lint_pass!(TypeIr => [NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_INHERENT]);
+declare_tool_lint! {
+/// The `usage_of_type_ir_traits` lint detects usage of `rustc_type_ir::Interner`,
+/// or `rustc_infer::InferCtxtLike`.
+///
+/// Methods of this trait should only be used within the type system abstraction layer,
+/// and in the generic next trait solver implementation. Look for an analogously named
+/// method on `TyCtxt` or `InferCtxt` (respectively).
+pub rustc::USAGE_OF_TYPE_IR_TRAITS,
+Allow,
+"usage `rustc_type_ir`-specific abstraction traits outside of trait system",
+report_in_external_macro: true
+}
+declare_lint_pass!(TypeIr => [NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_INHERENT, USAGE_OF_TYPE_IR_TRAITS]);
 impl<'tcx> LateLintPass<'tcx> for TypeIr {
-fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
+fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) {
+let res_def_id = match expr.kind {
+hir::ExprKind::Path(hir::QPath::Resolved(_, path)) => path.res.opt_def_id(),
+hir::ExprKind::Path(hir::QPath::TypeRelative(..)) | hir::ExprKind::MethodCall(..) => {
+cx.typeck_results().type_dependent_def_id(expr.hir_id)
+}
+_ => return,
+};
+let Some(res_def_id) = res_def_id else {
+return;
+};
+if let Some(assoc_item) = cx.tcx.opt_associated_item(res_def_id)
+&& let Some(trait_def_id) = assoc_item.trait_container(cx.tcx)
+&& (cx.tcx.is_diagnostic_item(sym::type_ir_interner, trait_def_id)
+| cx.tcx.is_diagnostic_item(sym::type_ir_infer_ctxt_like, trait_def_id))
+{
+cx.emit_span_lint(USAGE_OF_TYPE_IR_TRAITS, expr.span, TypeIrTraitUsage);
+}
+}
+fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
 let rustc_hir::ItemKind::Use(path, kind) = item.kind else { return };
 let is_mod_inherent = |def_id| cx.tcx.is_diagnostic_item(sym::type_ir_inherent, def_id);
@@ -394,15 +435,15 @@ declare_tool_lint! {
 declare_lint_pass!(Diagnostics => [UNTRANSLATABLE_DIAGNOSTIC, DIAGNOSTIC_OUTSIDE_OF_IMPL]);
 impl LateLintPass<'_> for Diagnostics {
-fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
-let collect_args_tys_and_spans = |args: &[Expr<'_>], reserve_one_extra: bool| {
+fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) {
+let collect_args_tys_and_spans = |args: &[hir::Expr<'_>], reserve_one_extra: bool| {
 let mut result = Vec::with_capacity(args.len() + usize::from(reserve_one_extra));
 result.extend(args.iter().map(|arg| (cx.typeck_results().expr_ty(arg), arg.span)));
 result
 };
 // Only check function calls and method calls.
 let (span, def_id, fn_gen_args, arg_tys_and_spans) = match expr.kind {
-ExprKind::Call(callee, args) => {
+hir::ExprKind::Call(callee, args) => {
 match cx.typeck_results().node_type(callee.hir_id).kind() {
 &ty::FnDef(def_id, fn_gen_args) => {
 (callee.span, def_id, fn_gen_args, collect_args_tys_and_spans(args, false))
@@ -410,7 +451,7 @@ impl LateLintPass<'_> for Diagnostics {
 _ => return, // occurs for fns passed as args
 }
 }
-ExprKind::MethodCall(_segment, _recv, args, _span) => {
+hir::ExprKind::MethodCall(_segment, _recv, args, _span) => {
 let Some((span, def_id, fn_gen_args)) = typeck_results_of_method_fn(cx, expr)
 else {
 return;
@@ -514,8 +555,8 @@ impl Diagnostics {
 let mut is_inside_appropriate_impl = false;
 for (_hir_id, parent) in cx.tcx.hir_parent_iter(current_id) {
 debug!(?parent);
-if let Node::Item(Item { kind: ItemKind::Impl(impl_), .. }) = parent
-&& let Impl { of_trait: Some(of_trait), .. } = impl_
+if let hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(impl_), .. }) = parent
+&& let hir::Impl { of_trait: Some(of_trait), .. } = impl_
 && let Some(def_id) = of_trait.trait_def_id()
 && let Some(name) = cx.tcx.get_diagnostic_name(def_id)
 && matches!(name, sym::Diagnostic | sym::Subdiagnostic | sym::LintDiagnostic)
@@ -543,8 +584,8 @@ declare_tool_lint! {
 declare_lint_pass!(BadOptAccess => [BAD_OPT_ACCESS]);
 impl LateLintPass<'_> for BadOptAccess {
-fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
-let ExprKind::Field(base, target) = expr.kind else { return };
+fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) {
+let hir::ExprKind::Field(base, target) = expr.kind else { return };
 let Some(adt_def) = cx.typeck_results().expr_ty(base).ty_adt_def() else { return };
 // Skip types without `#[rustc_lint_opt_ty]` - only so that the rest of the lint can be
 // avoided.
@@ -581,9 +622,12 @@ declare_tool_lint! {
 declare_lint_pass!(SpanUseEqCtxt => [SPAN_USE_EQ_CTXT]);
 impl<'tcx> LateLintPass<'tcx> for SpanUseEqCtxt {
-fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'_>) {
-if let ExprKind::Binary(BinOp { node: BinOpKind::Eq | BinOpKind::Ne, .. }, lhs, rhs) =
-expr.kind
+fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &hir::Expr<'_>) {
+if let hir::ExprKind::Binary(
+hir::BinOp { node: hir::BinOpKind::Eq | hir::BinOpKind::Ne, .. },
+lhs,
+rhs,
+) = expr.kind
 {
 if is_span_ctxt_call(cx, lhs) && is_span_ctxt_call(cx, rhs) {
 cx.emit_span_lint(SPAN_USE_EQ_CTXT, expr.span, SpanUseEqCtxtDiag);
@@ -592,9 +636,9 @@ impl<'tcx> LateLintPass<'tcx> for SpanUseEqCtxt {
 }
 }
-fn is_span_ctxt_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
+fn is_span_ctxt_call(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
 match &expr.kind {
-ExprKind::MethodCall(..) => cx
+hir::ExprKind::MethodCall(..) => cx
 .typeck_results()
 .type_dependent_def_id(expr.hir_id)
 .is_some_and(|call_did| cx.tcx.is_diagnostic_item(sym::SpanCtxt, call_did)),
@@ -617,11 +661,11 @@ declare_lint_pass!(SymbolInternStringLiteral => [SYMBOL_INTERN_STRING_LITERAL]);
 impl<'tcx> LateLintPass<'tcx> for SymbolInternStringLiteral {
 fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx rustc_hir::Expr<'tcx>) {
-if let ExprKind::Call(path, [arg]) = expr.kind
-&& let ExprKind::Path(ref qpath) = path.kind
+if let hir::ExprKind::Call(path, [arg]) = expr.kind
+&& let hir::ExprKind::Path(ref qpath) = path.kind
 && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
 && cx.tcx.is_diagnostic_item(sym::SymbolIntern, def_id)
-&& let ExprKind::Lit(kind) = arg.kind
+&& let hir::ExprKind::Lit(kind) = arg.kind
 && let rustc_ast::LitKind::Str(_, _) = kind.node
 {
 cx.emit_span_lint(

View file

@@ -645,6 +645,7 @@ fn register_internals(store: &mut LintStore) {
 LintId::of(USAGE_OF_QUALIFIED_TY),
 LintId::of(NON_GLOB_IMPORT_OF_TYPE_IR_INHERENT),
 LintId::of(USAGE_OF_TYPE_IR_INHERENT),
+LintId::of(USAGE_OF_TYPE_IR_TRAITS),
 LintId::of(BAD_OPT_ACCESS),
 LintId::of(SPAN_USE_EQ_CTXT),
 ],

View file

@@ -943,6 +943,11 @@ pub(crate) struct TyQualified {
 #[note]
 pub(crate) struct TypeIrInherentUsage;
+#[derive(LintDiagnostic)]
+#[diag(lint_type_ir_trait_usage)]
+#[note]
+pub(crate) struct TypeIrTraitUsage;
 #[derive(LintDiagnostic)]
 #[diag(lint_non_glob_import_type_ir_inherent)]
 pub(crate) struct NonGlobImportTypeIrInherent {

View file

@@ -104,8 +104,10 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions {
 // determining if we are in a doctest context can't currently be determined
 // by the code itself (there are no specific attributes), but fortunately rustdoc
 // sets a perma-unstable env var for libtest so we just reuse that for now
-let is_at_toplevel_doctest =
-|| self.body_depth == 2 && std::env::var("UNSTABLE_RUSTDOC_TEST_PATH").is_ok();
+let is_at_toplevel_doctest = || {
+self.body_depth == 2
+&& cx.tcx.env_var_os("UNSTABLE_RUSTDOC_TEST_PATH".as_ref()).is_some()
+};
 match item.kind {
 ItemKind::Impl(impl_) => {

View file

@@ -1,3 +1,4 @@
+use std::ffi::OsStr;
 use std::intrinsics::transmute_unchecked;
 use std::mem::MaybeUninit;
@@ -67,6 +68,10 @@ impl<T> EraseType for &'_ [T] {
 type Result = [u8; size_of::<&'static [()]>()];
 }
+impl EraseType for &'_ OsStr {
+type Result = [u8; size_of::<&'static OsStr>()];
+}
 impl<T> EraseType for &'_ ty::List<T> {
 type Result = [u8; size_of::<&'static ty::List<()>>()];
 }
@@ -174,6 +179,10 @@ impl<T> EraseType for Option<&'_ [T]> {
 type Result = [u8; size_of::<Option<&'static [()]>>()];
 }
+impl EraseType for Option<&'_ OsStr> {
+type Result = [u8; size_of::<Option<&'static OsStr>>()];
+}
 impl EraseType for Option<mir::DestructuredConstant<'_>> {
 type Result = [u8; size_of::<Option<mir::DestructuredConstant<'static>>>()];
 }

View file

@@ -1,5 +1,7 @@
 //! Defines the set of legal keys that can be used in queries.
+use std::ffi::OsStr;
 use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalModDefId, ModDefId};
 use rustc_hir::hir_id::{HirId, OwnerId};
 use rustc_query_system::dep_graph::DepNodeIndex;
@@ -498,6 +500,14 @@ impl Key for Option<Symbol> {
 }
 }
+impl<'tcx> Key for &'tcx OsStr {
+type Cache<V> = DefaultCache<Self, V>;
+fn default_span(&self, _tcx: TyCtxt<'_>) -> Span {
+DUMMY_SP
+}
+}
 /// Canonical query goals correspond to abstract trait operations that
 /// are not tied to any crate in particular.
 impl<'tcx, T: Clone> Key for CanonicalQueryInput<'tcx, T> {

View file

@@ -6,6 +6,7 @@
 #![allow(unused_parens)]
+use std::ffi::OsStr;
 use std::mem;
 use std::path::PathBuf;
 use std::sync::Arc;
@@ -30,7 +31,9 @@ use rustc_index::IndexVec;
 use rustc_lint_defs::LintId;
 use rustc_macros::rustc_queries;
 use rustc_query_system::ich::StableHashingContext;
-use rustc_query_system::query::{QueryCache, QueryMode, QueryState, try_get_cached};
+use rustc_query_system::query::{
+QueryCache, QueryMode, QueryStackDeferred, QueryState, try_get_cached,
+};
 use rustc_session::Limits;
 use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion};
 use rustc_session::cstore::{
@@ -119,6 +122,21 @@ rustc_queries! {
 desc { "perform lints prior to AST lowering" }
 }
+/// Tracked access to environment variables.
+///
+/// Useful for the implementation of `std::env!`, `proc-macro`s change
+/// detection and other changes in the compiler's behaviour that is easier
+/// to control with an environment variable than a flag.
+///
+/// NOTE: This currently does not work with dependency info in the
+/// analysis, codegen and linking passes, place extra code at the top of
+/// `rustc_interface::passes::write_dep_info` to make that work.
+query env_var_os(key: &'tcx OsStr) -> Option<&'tcx OsStr> {
+// Environment variables are global state
+eval_always
+desc { "get the value of an environment variable" }
+}
 query resolutions(_: ()) -> &'tcx ty::ResolverGlobalCtxt {
 no_hash
 desc { "getting the resolver outputs" }

View file

@@ -488,7 +488,7 @@ macro_rules! define_callbacks {
 #[derive(Default)]
 pub struct QueryStates<'tcx> {
 $(
-pub $name: QueryState<$($K)*>,
+pub $name: QueryState<$($K)*, QueryStackDeferred<'tcx>>,
 )*
 }

View file

@@ -7,6 +7,8 @@ pub mod tls;
 use std::assert_matches::{assert_matches, debug_assert_matches};
 use std::borrow::Borrow;
 use std::cmp::Ordering;
+use std::env::VarError;
+use std::ffi::OsStr;
 use std::hash::{Hash, Hasher};
 use std::marker::PhantomData;
 use std::ops::{Bound, Deref};
@@ -1883,6 +1885,15 @@ impl<'tcx> TyCtxt<'tcx> {
 }
 None
 }
+/// Helper to get a tracked environment variable via. [`TyCtxt::env_var_os`] and converting to
+/// UTF-8 like [`std::env::var`].
+pub fn env_var<K: ?Sized + AsRef<OsStr>>(self, key: &'tcx K) -> Result<&'tcx str, VarError> {
+match self.env_var_os(key.as_ref()) {
+Some(value) => value.to_str().ok_or_else(|| VarError::NotUnicode(value.to_os_string())),
+None => Err(VarError::NotPresent),
+}
+}
 }
 impl<'tcx> TyCtxtAt<'tcx> {

View file

@@ -88,7 +88,7 @@ impl<'tcx> Value<TyCtxt<'tcx>> for Representability {
 if info.query.dep_kind == dep_kinds::representability
 && let Some(field_id) = info.query.def_id
 && let Some(field_id) = field_id.as_local()
-&& let Some(DefKind::Field) = info.query.def_kind
+&& let Some(DefKind::Field) = info.query.info.def_kind
 {
 let parent_id = tcx.parent(field_id.to_def_id());
 let item_id = match tcx.def_kind(parent_id) {
@@ -216,7 +216,7 @@ impl<'tcx, T> Value<TyCtxt<'tcx>> for Result<T, &'_ ty::layout::LayoutError<'_>>
 continue;
 };
 let frame_span =
-frame.query.default_span(cycle[(i + 1) % cycle.len()].span);
+frame.query.info.default_span(cycle[(i + 1) % cycle.len()].span);
 if frame_span.is_dummy() {
 continue;
 }

View file

@@ -225,13 +225,13 @@ use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCoercion};
 use rustc_middle::ty::layout::ValidityRequirement;
 use rustc_middle::ty::print::{shrunk_instance_name, with_no_trimmed_paths};
 use rustc_middle::ty::{
-self, GenericArgs, GenericParamDefKind, Instance, InstanceKind, Interner, Ty, TyCtxt,
-TypeFoldable, TypeVisitableExt, VtblEntry,
+self, GenericArgs, GenericParamDefKind, Instance, InstanceKind, Ty, TyCtxt, TypeFoldable,
+TypeVisitableExt, VtblEntry,
 };
 use rustc_middle::util::Providers;
 use rustc_middle::{bug, span_bug};
 use rustc_session::Limit;
-use rustc_session::config::EntryFnType;
+use rustc_session::config::{DebugInfo, EntryFnType};
 use rustc_span::source_map::{Spanned, dummy_spanned, respan};
 use rustc_span::{DUMMY_SP, Span};
 use tracing::{debug, instrument, trace};
@@ -967,7 +967,7 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) ->
 {
 // `#[rustc_force_inline]` items should never be codegened. This should be caught by
 // the MIR validator.
-tcx.delay_bug("attempt to codegen `#[rustc_force_inline]` item");
+tcx.dcx().delayed_bug("attempt to codegen `#[rustc_force_inline]` item");
 }
 if def_id.is_local() {
@@ -1235,6 +1235,11 @@ fn collect_items_of_instance<'tcx>(
 };
 if mode == CollectionMode::UsedItems {
+if tcx.sess.opts.debuginfo == DebugInfo::Full {
+for var_debug_info in &body.var_debug_info {
+collector.visit_var_debug_info(var_debug_info);
+}
+}
 for (bb, data) in traversal::mono_reachable(body, tcx, instance) {
 collector.visit_basic_block_data(bb, data)
 }

View file

@@ -6,6 +6,7 @@
 // tidy-alphabetical-start
 #![allow(rustc::usage_of_type_ir_inherent)]
+#![cfg_attr(not(bootstrap), allow(rustc::usage_of_type_ir_traits))]
 // tidy-alphabetical-end
 pub mod canonicalizer;

View file

@@ -26,8 +26,8 @@ use rustc_middle::ty::TyCtxt;
 use rustc_query_system::dep_graph::SerializedDepNodeIndex;
 use rustc_query_system::ich::StableHashingContext;
 use rustc_query_system::query::{
-CycleError, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode, QueryState,
-get_query_incr, get_query_non_incr,
+CycleError, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode, QueryStackDeferred,
+QueryState, get_query_incr, get_query_non_incr,
 };
 use rustc_query_system::{HandleCycleError, Value};
 use rustc_span::{ErrorGuaranteed, Span};
@@ -84,7 +84,10 @@ where
 }
 #[inline(always)]
-fn query_state<'a>(self, qcx: QueryCtxt<'tcx>) -> &'a QueryState<Self::Key>
+fn query_state<'a>(
+self,
+qcx: QueryCtxt<'tcx>,
+) -> &'a QueryState<Self::Key, QueryStackDeferred<'tcx>>
 where
 QueryCtxt<'tcx>: 'a,
 {
@@ -93,7 +96,7 @@ where
 unsafe {
 &*(&qcx.tcx.query_system.states as *const QueryStates<'tcx>)
 .byte_add(self.dynamic.query_state)
-.cast::<QueryState<Self::Key>>()
+.cast::<QueryState<Self::Key, QueryStackDeferred<'tcx>>>()
 }
 }

View file

@@ -5,6 +5,7 @@
 use std::num::NonZero;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::sync::{DynSend, DynSync};
 use rustc_data_structures::unord::UnordMap;
 use rustc_hashes::Hash64;
 use rustc_index::Idx;
@@ -24,8 +25,8 @@ use rustc_middle::ty::{self, TyCtxt};
 use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext};
 use rustc_query_system::ich::StableHashingContext;
 use rustc_query_system::query::{
-QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect, QueryStackFrame,
-force_query,
+QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect,
+QueryStackDeferred, QueryStackFrame, QueryStackFrameExtra, force_query,
 };
 use rustc_query_system::{QueryOverflow, QueryOverflowNote};
 use rustc_serialize::{Decodable, Encodable};
@@ -65,7 +66,9 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
 }
 }
-impl QueryContext for QueryCtxt<'_> {
+impl<'tcx> QueryContext for QueryCtxt<'tcx> {
+type QueryInfo = QueryStackDeferred<'tcx>;
 #[inline]
 fn next_job_id(self) -> QueryJobId {
 QueryJobId(
@@ -82,7 +85,9 @@ impl QueryContext for QueryCtxt<'_> {
 /// Returns a query map representing active query jobs.
 /// It returns an incomplete map as an error if it fails
 /// to take locks.
-fn collect_active_jobs(self) -> Result<QueryMap, QueryMap> {
+fn collect_active_jobs(
+self,
+) -> Result<QueryMap<QueryStackDeferred<'tcx>>, QueryMap<QueryStackDeferred<'tcx>>> {
 let mut jobs = QueryMap::default();
 let mut complete = true;
@@ -95,6 +100,13 @@ impl QueryContext for QueryCtxt<'_> {
 if complete { Ok(jobs) } else { Err(jobs) }
 }
+fn lift_query_info(
+self,
+info: &QueryStackDeferred<'tcx>,
+) -> rustc_query_system::query::QueryStackFrameExtra {
+info.extract()
+}
 // Interactions with on_disk_cache
 fn load_side_effect(
 self,
@@ -159,7 +171,10 @@ impl QueryContext for QueryCtxt<'_> {
 self.sess.dcx().emit_fatal(QueryOverflow {
 span: info.job.span,
-note: QueryOverflowNote { desc: info.query.description, depth },
+note: QueryOverflowNote {
+desc: self.lift_query_info(&info.query.info).description,
+depth,
+},
 suggested_limit,
 crate_name: self.crate_name(LOCAL_CRATE),
 });
@@ -296,16 +311,17 @@ macro_rules! should_ever_cache_on_disk {
 };
 }
-pub(crate) fn create_query_frame<
-'tcx,
-K: Copy + Key + for<'a> HashStable<StableHashingContext<'a>>,
->(
-tcx: TyCtxt<'tcx>,
-do_describe: fn(TyCtxt<'tcx>, K) -> String,
-key: K,
-kind: DepKind,
-name: &'static str,
-) -> QueryStackFrame {
+fn create_query_frame_extra<'tcx, K: Key + Copy + 'tcx>(
+(tcx, key, kind, name, do_describe): (
+TyCtxt<'tcx>,
+K,
+DepKind,
+&'static str,
+fn(TyCtxt<'tcx>, K) -> String,
+),
+) -> QueryStackFrameExtra {
+let def_id = key.key_as_def_id();
 // If reduced queries are requested, we may be printing a query stack due
 // to a panic. Avoid using `default_span` and `def_kind` in that case.
 let reduce_queries = with_reduced_queries();
@@ -324,13 +340,28 @@ pub(crate) fn create_query_frame<
 } else {
 Some(key.default_span(tcx))
 };
-let def_id = key.key_as_def_id();
 let def_kind = if kind == dep_graph::dep_kinds::def_kind || reduce_queries {
 // Try to avoid infinite recursion.
 None
 } else {
 def_id.and_then(|def_id| def_id.as_local()).map(|def_id| tcx.def_kind(def_id))
 };
+QueryStackFrameExtra::new(description, span, def_kind)
+}
+pub(crate) fn create_query_frame<
+'tcx,
+K: Copy + DynSend + DynSync + Key + for<'a> HashStable<StableHashingContext<'a>> + 'tcx,
+>(
+tcx: TyCtxt<'tcx>,
+do_describe: fn(TyCtxt<'tcx>, K) -> String,
+key: K,
+kind: DepKind,
+name: &'static str,
+) -> QueryStackFrame<QueryStackDeferred<'tcx>> {
+let def_id = key.key_as_def_id();
 let hash = || {
 tcx.with_stable_hashing_context(|mut hcx| {
 let mut hasher = StableHasher::new();
@@ -341,7 +372,10 @@ pub(crate) fn create_query_frame<
 };
 let def_id_for_ty_in_cycle = key.def_id_for_ty_in_cycle();
-QueryStackFrame::new(description, span, def_id, def_kind, kind, def_id_for_ty_in_cycle, hash)
+let info =
+QueryStackDeferred::new((tcx, key, kind, name, do_describe), create_query_frame_extra);
+QueryStackFrame::new(info, kind, hash, def_id, def_id_for_ty_in_cycle)
 }
 pub(crate) fn encode_query_results<'a, 'tcx, Q>(
@@ -688,7 +722,10 @@ macro_rules! define_queries {
 }
 }
-pub(crate) fn try_collect_active_jobs<'tcx>(tcx: TyCtxt<'tcx>, qmap: &mut QueryMap) -> Option<()> {
+pub(crate) fn try_collect_active_jobs<'tcx>(
+tcx: TyCtxt<'tcx>,
+qmap: &mut QueryMap<QueryStackDeferred<'tcx>>,
+) -> Option<()> {
 let make_query = |tcx, key| {
 let kind = rustc_middle::dep_graph::dep_kinds::$name;
 let name = stringify!($name);
@@ -768,7 +805,9 @@ macro_rules! define_queries {
 // These arrays are used for iteration and can't be indexed by `DepKind`.
-const TRY_COLLECT_ACTIVE_JOBS: &[for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap) -> Option<()>] =
+const TRY_COLLECT_ACTIVE_JOBS: &[
+for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap<QueryStackDeferred<'tcx>>) -> Option<()>
+] =
 &[$(query_impl::$name::try_collect_active_jobs),*];
 const ALLOC_SELF_PROFILE_QUERY_STRINGS: &[

View file

@@ -6,6 +6,7 @@ use std::hash::Hash;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_span::ErrorGuaranteed;
+use super::QueryStackFrameExtra;
 use crate::dep_graph::{DepKind, DepNode, DepNodeParams, SerializedDepNodeIndex};
 use crate::error::HandleCycleError;
 use crate::ich::StableHashingContext;
@@ -27,7 +28,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy {
 fn format_value(self) -> fn(&Self::Value) -> String;
 // Don't use this method to access query results, instead use the methods on TyCtxt
-fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState<Self::Key>
+fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState<Self::Key, Qcx::QueryInfo>
 where
 Qcx: 'a;
@@ -57,7 +58,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy {
 fn value_from_cycle_error(
 self,
 tcx: Qcx::DepContext,
-cycle_error: &CycleError,
+cycle_error: &CycleError<QueryStackFrameExtra>,
 guar: ErrorGuaranteed,
 ) -> Self::Value;

View file

@@ -1,3 +1,4 @@
+use std::fmt::Debug;
 use std::hash::Hash;
 use std::io::Write;
 use std::iter;
@@ -12,6 +13,7 @@ use rustc_hir::def::DefKind;
 use rustc_session::Session;
 use rustc_span::{DUMMY_SP, Span};
+use super::QueryStackFrameExtra;
 use crate::dep_graph::DepContext;
 use crate::error::CycleStack;
 use crate::query::plumbing::CycleError;
@@ -19,45 +21,54 @@ use crate::query::{QueryContext, QueryStackFrame};
 /// Represents a span and a query key.
 #[derive(Clone, Debug)]
-pub struct QueryInfo {
+pub struct QueryInfo<I> {
 /// The span corresponding to the reason for which this query was required.
 pub span: Span,
-pub query: QueryStackFrame,
+pub query: QueryStackFrame<I>,
 }
-pub type QueryMap = FxHashMap<QueryJobId, QueryJobInfo>;
+impl<I> QueryInfo<I> {
+pub(crate) fn lift<Qcx: QueryContext<QueryInfo = I>>(
+&self,
+qcx: Qcx,
+) -> QueryInfo<QueryStackFrameExtra> {
+QueryInfo { span: self.span, query: self.query.lift(qcx) }
+}
+}
+pub type QueryMap<I> = FxHashMap<QueryJobId, QueryJobInfo<I>>;
 /// A value uniquely identifying an active query job.
 #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
 pub struct QueryJobId(pub NonZero<u64>);
 impl QueryJobId {
-fn query(self, map: &QueryMap) -> QueryStackFrame {
+fn query<I: Clone>(self, map: &QueryMap<I>) -> QueryStackFrame<I> {
 map.get(&self).unwrap().query.clone()
 }
-fn span(self, map: &QueryMap) -> Span {
+fn span<I>(self, map: &QueryMap<I>) -> Span {
 map.get(&self).unwrap().job.span
 }
-fn parent(self, map: &QueryMap) -> Option<QueryJobId> {
+fn parent<I>(self, map: &QueryMap<I>) -> Option<QueryJobId> {
 map.get(&self).unwrap().job.parent
 }
-fn latch(self, map: &QueryMap) -> Option<&QueryLatch> {
+fn latch<I>(self, map: &QueryMap<I>) -> Option<&QueryLatch<I>> {
 map.get(&self).unwrap().job.latch.as_ref()
 }
 }
 #[derive(Clone, Debug)]
-pub struct QueryJobInfo {
-pub query: QueryStackFrame,
-pub job: QueryJob,
+pub struct QueryJobInfo<I> {
+pub query: QueryStackFrame<I>,
+pub job: QueryJob<I>,
 }
 /// Represents an active query job.
-#[derive(Clone, Debug)]
-pub struct QueryJob {
+#[derive(Debug)]
+pub struct QueryJob<I> {
 pub id: QueryJobId,
 /// The span corresponding to the reason for which this query was required.
@@ -67,17 +78,23 @@ pub struct QueryJob {
 pub parent: Option<QueryJobId>,
 /// The latch that is used to wait on this job.
-latch: Option<QueryLatch>,
+latch: Option<QueryLatch<I>>,
 }
-impl QueryJob {
+impl<I> Clone for QueryJob<I> {
+fn clone(&self) -> Self {
+Self { id: self.id, span: self.span, parent: self.parent, latch: self.latch.clone() }
+}
+}
+impl<I> QueryJob<I> {
 /// Creates a new query job.
 #[inline]
 pub fn new(id: QueryJobId, span: Span, parent: Option<QueryJobId>) -> Self {
 QueryJob { id, span, parent, latch: None }
 }
-pub(super) fn latch(&mut self) -> QueryLatch {
+pub(super) fn latch(&mut self) -> QueryLatch<I> {
 if self.latch.is_none() {
 self.latch = Some(QueryLatch::new());
 }
@@ -97,12 +114,12 @@ impl QueryJob {
 }
 impl QueryJobId {
-pub(super) fn find_cycle_in_stack(
+pub(super) fn find_cycle_in_stack<I: Clone>(
 &self,
-query_map: QueryMap,
+query_map: QueryMap<I>,
 current_job: &Option<QueryJobId>,
 span: Span,
-) -> CycleError {
+) -> CycleError<I> {
 // Find the waitee amongst `current_job` parents
 let mut cycle = Vec::new();
 let mut current_job = Option::clone(current_job);
@@ -136,7 +153,7 @@ impl QueryJobId {
 #[cold]
 #[inline(never)]
-pub fn find_dep_kind_root(&self, query_map: QueryMap) -> (QueryJobInfo, usize) {
+pub fn find_dep_kind_root<I: Clone>(&self, query_map: QueryMap<I>) -> (QueryJobInfo<I>, usize) {
 let mut depth = 1;
 let info = query_map.get(&self).unwrap();
 let dep_kind = info.query.dep_kind;
@@ -156,25 +173,31 @@ impl QueryJobId {
 }
 #[derive(Debug)]
-struct QueryWaiter {
+struct QueryWaiter<I> {
 query: Option<QueryJobId>,
 condvar: Condvar,
 span: Span,
-cycle: Mutex<Option<CycleError>>,
+cycle: Mutex<Option<CycleError<I>>>,
 }
 #[derive(Debug)]
-struct QueryLatchInfo {
+struct QueryLatchInfo<I> {
 complete: bool,
-waiters: Vec<Arc<QueryWaiter>>,
+waiters: Vec<Arc<QueryWaiter<I>>>,
 }
-#[derive(Clone, Debug)]
-pub(super) struct QueryLatch {
-info: Arc<Mutex<QueryLatchInfo>>,
+#[derive(Debug)]
+pub(super) struct QueryLatch<I> {
+info: Arc<Mutex<QueryLatchInfo<I>>>,
 }
-impl QueryLatch {
+impl<I> Clone for QueryLatch<I> {
+fn clone(&self) -> Self {
+Self { info: Arc::clone(&self.info) }
+}
+}
+impl<I> QueryLatch<I> {
 fn new() -> Self {
 QueryLatch {
 info: Arc::new(Mutex::new(QueryLatchInfo { complete: false, waiters: Vec::new() })),
@@ -182,7 +205,11 @@ impl QueryLatch {
 }
 /// Awaits for the query job to complete.
-pub(super) fn wait_on(&self, query: Option<QueryJobId>, span: Span) -> Result<(), CycleError> {
+pub(super) fn wait_on(
+&self,
+query: Option<QueryJobId>,
+span: Span,
+) -> Result<(), CycleError<I>> {
 let waiter =
 Arc::new(QueryWaiter { query, span, cycle: Mutex::new(None), condvar: Condvar::new() });
 self.wait_on_inner(&waiter);
@@ -197,7 +224,7 @@ impl QueryLatch {
 }
 /// Awaits the caller on this latch by blocking the current thread.
-fn wait_on_inner(&self, waiter: &Arc<QueryWaiter>) {
+fn wait_on_inner(&self, waiter: &Arc<QueryWaiter<I>>) {
 let mut info = self.info.lock();
 if !info.complete {
 // We push the waiter on to the `waiters` list. It can be accessed inside
@@ -232,7 +259,7 @@ impl QueryLatch {
 /// Removes a single waiter from the list of waiters.
 /// This is used to break query cycles.
-fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter> {
+fn extract_waiter(&self, waiter: usize) -> Arc<QueryWaiter<I>> {
 let mut info = self.info.lock();
 debug_assert!(!info.complete);
 // Remove the waiter from the list of waiters
@@ -252,7 +279,11 @@ type Waiter = (QueryJobId, usize);
 /// For visits of resumable waiters it returns Some(Some(Waiter)) which has the
 /// required information to resume the waiter.
 /// If all `visit` calls returns None, this function also returns None.
-fn visit_waiters<F>(query_map: &QueryMap, query: QueryJobId, mut visit: F) -> Option<Option<Waiter>>
+fn visit_waiters<I, F>(
+query_map: &QueryMap<I>,
+query: QueryJobId,
+mut visit: F,
+) -> Option<Option<Waiter>>
 where
 F: FnMut(Span, QueryJobId) -> Option<Option<Waiter>>,
 {
@@ -282,8 +313,8 @@ where
 /// `span` is the reason for the `query` to execute. This is initially DUMMY_SP.
 /// If a cycle is detected, this initial value is replaced with the span causing
 /// the cycle.
-fn cycle_check(
-query_map: &QueryMap,
+fn cycle_check<I>(
+query_map: &QueryMap<I>,
 query: QueryJobId,
 span: Span,
 stack: &mut Vec<(Span, QueryJobId)>,
@@ -322,8 +353,8 @@ fn cycle_check(
 /// Finds out if there's a path to the compiler root (aka. code which isn't in a query)
 /// from `query` without going through any of the queries in `visited`.
 /// This is achieved with a depth first search.
-fn connected_to_root(
-query_map: &QueryMap,
+fn connected_to_root<I>(
+query_map: &QueryMap<I>,
 query: QueryJobId,
 visited: &mut FxHashSet<QueryJobId>,
 ) -> bool {
@@ -344,7 +375,7 @@ fn connected_to_root(
 }
 // Deterministically pick an query from a list
-fn pick_query<'a, T, F>(query_map: &QueryMap, queries: &'a [T], f: F) -> &'a T
+fn pick_query<'a, I: Clone, T, F>(query_map: &QueryMap<I>, queries: &'a [T], f: F) -> &'a T
 where
 F: Fn(&T) -> (Span, QueryJobId),
 {
@@ -369,10 +400,10 @@ where
 /// the function return true.
 /// If a cycle was not found, the starting query is removed from `jobs` and
 /// the function returns false.
-fn remove_cycle(
-query_map: &QueryMap,
+fn remove_cycle<I: Clone>(
+query_map: &QueryMap<I>,
 jobs: &mut Vec<QueryJobId>,
-wakelist: &mut Vec<Arc<QueryWaiter>>,
+wakelist: &mut Vec<Arc<QueryWaiter<I>>>,
 ) -> bool {
 let mut visited = FxHashSet::default();
 let mut stack = Vec::new();
@@ -473,7 +504,10 @@ fn remove_cycle(
 /// uses a query latch and then resuming that waiter.
 /// There may be multiple cycles involved in a deadlock, so this searches
 /// all active queries for cycles before finally resuming all the waiters at once.
-pub fn break_query_cycles(query_map: QueryMap, registry: &rayon_core::Registry) {
+pub fn break_query_cycles<I: Clone + Debug>(
+query_map: QueryMap<I>,
+registry: &rayon_core::Registry,
+) {
 let mut wakelist = Vec::new();
 let mut jobs: Vec<QueryJobId> = query_map.keys().cloned().collect();
@@ -520,7 +554,7 @@ pub fn report_cycle<'a>(
 ) -> Diag<'a> {
 assert!(!stack.is_empty());
-let span = stack[0].query.default_span(stack[1 % stack.len()].span);
+let span = stack[0].query.info.default_span(stack[1 % stack.len()].span);
 let mut cycle_stack = Vec::new();
@@ -529,31 +563,31 @@ pub fn report_cycle<'a>(
 for i in 1..stack.len() {
 let query = &stack[i].query;
-let span = query.default_span(stack[(i + 1) % stack.len()].span);
-cycle_stack.push(CycleStack { span, desc: query.description.to_owned() });
+let span = query.info.default_span(stack[(i + 1) % stack.len()].span);
+cycle_stack.push(CycleStack { span, desc: query.info.description.to_owned() });
 }
 let mut cycle_usage = None;
 if let Some((span, ref query)) = *usage {
 cycle_usage = Some(crate::error::CycleUsage {
-span: query.default_span(span),
-usage: query.description.to_string(),
+span: query.info.default_span(span),
+usage: query.info.description.to_string(),
 });
 }
-let alias = if stack.iter().all(|entry| matches!(entry.query.def_kind, Some(DefKind::TyAlias)))
-{
+let alias =
+if stack.iter().all(|entry| matches!(entry.query.info.def_kind, Some(DefKind::TyAlias))) {
 Some(crate::error::Alias::Ty)
-} else if stack.iter().all(|entry| entry.query.def_kind == Some(DefKind::TraitAlias)) {
+} else if stack.iter().all(|entry| entry.query.info.def_kind == Some(DefKind::TraitAlias)) {
 Some(crate::error::Alias::Trait)
 } else {
 None
 };
 let cycle_diag = crate::error::Cycle {
 span,
 cycle_stack,
-stack_bottom: stack[0].query.description.to_owned(),
+stack_bottom: stack[0].query.info.description.to_owned(),
alias, alias,
cycle_usage, cycle_usage,
stack_count, stack_count,
@ -589,6 +623,7 @@ pub fn print_query_stack<Qcx: QueryContext>(
let Some(query_info) = query_map.get(&query) else { let Some(query_info) = query_map.get(&query) else {
break; break;
}; };
let query_extra = qcx.lift_query_info(&query_info.query.info);
if Some(count_printed) < limit_frames || limit_frames.is_none() { if Some(count_printed) < limit_frames || limit_frames.is_none() {
// Only print to stderr as many stack frames as `num_frames` when present. // Only print to stderr as many stack frames as `num_frames` when present.
// FIXME: needs translation // FIXME: needs translation
@ -596,7 +631,7 @@ pub fn print_query_stack<Qcx: QueryContext>(
#[allow(rustc::untranslatable_diagnostic)] #[allow(rustc::untranslatable_diagnostic)]
dcx.struct_failure_note(format!( dcx.struct_failure_note(format!(
"#{} [{:?}] {}", "#{} [{:?}] {}",
count_printed, query_info.query.dep_kind, query_info.query.description count_printed, query_info.query.dep_kind, query_extra.description
)) ))
.with_span(query_info.job.span) .with_span(query_info.job.span)
.emit(); .emit();
@ -609,7 +644,7 @@ pub fn print_query_stack<Qcx: QueryContext>(
"#{} [{}] {}", "#{} [{}] {}",
count_total, count_total,
qcx.dep_context().dep_kind_info(query_info.query.dep_kind).name, qcx.dep_context().dep_kind_info(query_info.query.dep_kind).name,
query_info.query.description query_extra.description
); );
} }

View file

@ -1,4 +1,9 @@
mod plumbing; mod plumbing;
use std::fmt::Debug;
use std::marker::PhantomData;
use std::mem::transmute;
use std::sync::Arc;
pub use self::plumbing::*; pub use self::plumbing::*;
mod job; mod job;
@ -11,6 +16,7 @@ mod caches;
pub use self::caches::{DefIdCache, DefaultCache, QueryCache, SingleCache, VecCache}; pub use self::caches::{DefIdCache, DefaultCache, QueryCache, SingleCache, VecCache};
mod config; mod config;
use rustc_data_structures::sync::{DynSend, DynSync};
use rustc_errors::DiagInner; use rustc_errors::DiagInner;
use rustc_hashes::Hash64; use rustc_hashes::Hash64;
use rustc_hir::def::DefKind; use rustc_hir::def::DefKind;
@ -25,31 +31,59 @@ use crate::dep_graph::{DepKind, DepNodeIndex, HasDepContext, SerializedDepNodeIn
/// ///
/// This is mostly used in case of cycles for error reporting. /// This is mostly used in case of cycles for error reporting.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct QueryStackFrame { pub struct QueryStackFrame<I> {
pub description: String, /// This field initially stores a `QueryStackDeferred` during collection,
span: Option<Span>, /// but can later be changed to `QueryStackFrameExtra` containing concrete information
pub def_id: Option<DefId>, /// by calling `lift`. This is done so that collecting active queries does not need to invoke
pub def_kind: Option<DefKind>, /// queries; instead, `lift` will call queries in a more appropriate location.
/// A def-id that is extracted from a `Ty` in a query key pub info: I,
pub def_id_for_ty_in_cycle: Option<DefId>,
pub dep_kind: DepKind, pub dep_kind: DepKind,
/// This hash is used to deterministically pick /// This hash is used to deterministically pick
/// a query to remove cycles in the parallel compiler. /// a query to remove cycles in the parallel compiler.
hash: Hash64, hash: Hash64,
pub def_id: Option<DefId>,
/// A def-id that is extracted from a `Ty` in a query key
pub def_id_for_ty_in_cycle: Option<DefId>,
} }
impl QueryStackFrame { impl<I> QueryStackFrame<I> {
#[inline] #[inline]
pub fn new( pub fn new(
description: String, info: I,
span: Option<Span>,
def_id: Option<DefId>,
def_kind: Option<DefKind>,
dep_kind: DepKind, dep_kind: DepKind,
def_id_for_ty_in_cycle: Option<DefId>,
hash: impl FnOnce() -> Hash64, hash: impl FnOnce() -> Hash64,
def_id: Option<DefId>,
def_id_for_ty_in_cycle: Option<DefId>,
) -> Self { ) -> Self {
Self { description, span, def_id, def_kind, def_id_for_ty_in_cycle, dep_kind, hash: hash() } Self { info, def_id, dep_kind, hash: hash(), def_id_for_ty_in_cycle }
}
fn lift<Qcx: QueryContext<QueryInfo = I>>(
&self,
qcx: Qcx,
) -> QueryStackFrame<QueryStackFrameExtra> {
QueryStackFrame {
info: qcx.lift_query_info(&self.info),
dep_kind: self.dep_kind,
hash: self.hash,
def_id: self.def_id,
def_id_for_ty_in_cycle: self.def_id_for_ty_in_cycle,
}
}
}
#[derive(Clone, Debug)]
pub struct QueryStackFrameExtra {
pub description: String,
span: Option<Span>,
pub def_kind: Option<DefKind>,
}
impl QueryStackFrameExtra {
#[inline]
pub fn new(description: String, span: Option<Span>, def_kind: Option<DefKind>) -> Self {
Self { description, span, def_kind }
} }
// FIXME(eddyb) Get more valid `Span`s on queries. // FIXME(eddyb) Get more valid `Span`s on queries.
@ -62,7 +96,41 @@ impl QueryStackFrame {
} }
} }
/// Track a 'side effects' for a particular query. /// Track a 'side effect' for a particular query.
/// This is used to hold a closure which can create `QueryStackFrameExtra`.
#[derive(Clone)]
pub struct QueryStackDeferred<'tcx> {
_dummy: PhantomData<&'tcx ()>,
// `extract` may contain references to 'tcx, but we can't tell drop checking that it won't
// access it in the destructor.
extract: Arc<dyn Fn() -> QueryStackFrameExtra + DynSync + DynSend>,
}
impl<'tcx> QueryStackDeferred<'tcx> {
pub fn new<C: Copy + DynSync + DynSend + 'tcx>(
context: C,
extract: fn(C) -> QueryStackFrameExtra,
) -> Self {
let extract: Arc<dyn Fn() -> QueryStackFrameExtra + DynSync + DynSend + 'tcx> =
Arc::new(move || extract(context));
// SAFETY: The `extract` closure does not access 'tcx in its destructor as the only
// captured variable is `context` which is Copy and cannot have a destructor.
Self { _dummy: PhantomData, extract: unsafe { transmute(extract) } }
}
pub fn extract(&self) -> QueryStackFrameExtra {
(self.extract)()
}
}
impl<'tcx> Debug for QueryStackDeferred<'tcx> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("QueryStackDeferred")
}
}
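For orientation, here is a minimal, self-contained sketch of the pattern `QueryStackDeferred` implements: capture a cheap `Copy` context and defer building the expensive frame description until it is actually needed. The names and simplifications (no `'tcx` lifetime, no `transmute`, plain `Send + Sync` instead of `DynSend`/`DynSync`) are assumptions for illustration only, not the compiler's actual code.
use std::sync::Arc;
// Simplified stand-in for `QueryStackFrameExtra`: the description that is expensive to build.
#[derive(Clone, Debug)]
struct FrameExtra {
    description: String,
}
// Simplified stand-in for `QueryStackDeferred`: store a closure instead of the description,
// so that collecting active queries never has to run other queries up front.
#[derive(Clone)]
struct Deferred {
    extract: Arc<dyn Fn() -> FrameExtra + Send + Sync>,
}
impl Deferred {
    fn new<C: Copy + Send + Sync + 'static>(context: C, extract: fn(C) -> FrameExtra) -> Self {
        Deferred { extract: Arc::new(move || extract(context)) }
    }
    // Analogue of `lift`/`extract`: only now is the description actually computed.
    fn extract(&self) -> FrameExtra {
        (self.extract)()
    }
}
fn main() {
    // The context models the cheap, Copy query key captured while collecting active jobs.
    let deferred = Deferred::new(42u32, |key| FrameExtra { description: format!("query for key {key}") });
    // Later (e.g. when printing a query stack or reporting a cycle) the description is built on demand.
    println!("{:?}", deferred.extract());
}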
/// Tracks 'side effects' for a particular query.
/// This struct is saved to disk along with the query result, /// This struct is saved to disk along with the query result,
/// and loaded from disk if we mark the query as green. /// and loaded from disk if we mark the query as green.
/// This allows us to 'replay' changes to global state /// This allows us to 'replay' changes to global state
@ -81,12 +149,16 @@ pub enum QuerySideEffect {
} }
pub trait QueryContext: HasDepContext { pub trait QueryContext: HasDepContext {
type QueryInfo: Clone;
fn next_job_id(self) -> QueryJobId; fn next_job_id(self) -> QueryJobId;
/// Get the query information from the TLS context. /// Get the query information from the TLS context.
fn current_query_job(self) -> Option<QueryJobId>; fn current_query_job(self) -> Option<QueryJobId>;
fn collect_active_jobs(self) -> Result<QueryMap, QueryMap>; fn collect_active_jobs(self) -> Result<QueryMap<Self::QueryInfo>, QueryMap<Self::QueryInfo>>;
fn lift_query_info(self, info: &Self::QueryInfo) -> QueryStackFrameExtra;
/// Load a side effect associated to the node in the previous session. /// Load a side effect associated to the node in the previous session.
fn load_side_effect( fn load_side_effect(

View file

@ -16,7 +16,7 @@ use rustc_errors::{Diag, FatalError, StashKey};
use rustc_span::{DUMMY_SP, Span}; use rustc_span::{DUMMY_SP, Span};
use tracing::instrument; use tracing::instrument;
use super::QueryConfig; use super::{QueryConfig, QueryStackFrameExtra};
use crate::HandleCycleError; use crate::HandleCycleError;
use crate::dep_graph::{DepContext, DepGraphData, DepNode, DepNodeIndex, DepNodeParams}; use crate::dep_graph::{DepContext, DepGraphData, DepNode, DepNodeIndex, DepNodeParams};
use crate::ich::StableHashingContext; use crate::ich::StableHashingContext;
@ -29,23 +29,23 @@ fn equivalent_key<K: Eq, V>(k: &K) -> impl Fn(&(K, V)) -> bool + '_ {
move |x| x.0 == *k move |x| x.0 == *k
} }
pub struct QueryState<K> { pub struct QueryState<K, I> {
active: Sharded<hashbrown::HashTable<(K, QueryResult)>>, active: Sharded<hashbrown::HashTable<(K, QueryResult<I>)>>,
} }
/// Indicates the state of a query for a given key in a query map. /// Indicates the state of a query for a given key in a query map.
enum QueryResult { enum QueryResult<I> {
/// An already executing query. The query job can be used to await for its completion. /// An already executing query. The query job can be used to await for its completion.
Started(QueryJob), Started(QueryJob<I>),
/// The query panicked. Queries trying to wait on this will raise a fatal error which will /// The query panicked. Queries trying to wait on this will raise a fatal error which will
/// silently panic. /// silently panic.
Poisoned, Poisoned,
} }
impl QueryResult { impl<I> QueryResult<I> {
/// Unwraps the query job expecting that it has started. /// Unwraps the query job expecting that it has started.
fn expect_job(self) -> QueryJob { fn expect_job(self) -> QueryJob<I> {
match self { match self {
Self::Started(job) => job, Self::Started(job) => job,
Self::Poisoned => { Self::Poisoned => {
@ -55,7 +55,7 @@ impl QueryResult {
} }
} }
impl<K> QueryState<K> impl<K, I> QueryState<K, I>
where where
K: Eq + Hash + Copy + Debug, K: Eq + Hash + Copy + Debug,
{ {
@ -66,8 +66,8 @@ where
pub fn try_collect_active_jobs<Qcx: Copy>( pub fn try_collect_active_jobs<Qcx: Copy>(
&self, &self,
qcx: Qcx, qcx: Qcx,
make_query: fn(Qcx, K) -> QueryStackFrame, make_query: fn(Qcx, K) -> QueryStackFrame<I>,
jobs: &mut QueryMap, jobs: &mut QueryMap<I>,
) -> Option<()> { ) -> Option<()> {
let mut active = Vec::new(); let mut active = Vec::new();
@ -76,7 +76,7 @@ where
for shard in self.active.try_lock_shards() { for shard in self.active.try_lock_shards() {
for (k, v) in shard?.iter() { for (k, v) in shard?.iter() {
if let QueryResult::Started(ref job) = *v { if let QueryResult::Started(ref job) = *v {
active.push((*k, job.clone())); active.push((*k, (*job).clone()));
} }
} }
} }
@ -92,19 +92,19 @@ where
} }
} }
impl<K> Default for QueryState<K> { impl<K, I> Default for QueryState<K, I> {
fn default() -> QueryState<K> { fn default() -> QueryState<K, I> {
QueryState { active: Default::default() } QueryState { active: Default::default() }
} }
} }
/// A type representing the responsibility to execute the job in the `job` field. /// A type representing the responsibility to execute the job in the `job` field.
/// This will poison the relevant query if dropped. /// This will poison the relevant query if dropped.
struct JobOwner<'tcx, K> struct JobOwner<'tcx, K, I>
where where
K: Eq + Hash + Copy, K: Eq + Hash + Copy,
{ {
state: &'tcx QueryState<K>, state: &'tcx QueryState<K, I>,
key: K, key: K,
} }
@ -146,7 +146,7 @@ where
} }
Stash => { Stash => {
let guar = if let Some(root) = cycle_error.cycle.first() let guar = if let Some(root) = cycle_error.cycle.first()
&& let Some(span) = root.query.span && let Some(span) = root.query.info.span
{ {
error.stash(span, StashKey::Cycle).unwrap() error.stash(span, StashKey::Cycle).unwrap()
} else { } else {
@ -157,7 +157,7 @@ where
} }
} }
impl<'tcx, K> JobOwner<'tcx, K> impl<'tcx, K, I> JobOwner<'tcx, K, I>
where where
K: Eq + Hash + Copy, K: Eq + Hash + Copy,
{ {
@ -194,7 +194,7 @@ where
} }
} }
impl<'tcx, K> Drop for JobOwner<'tcx, K> impl<'tcx, K, I> Drop for JobOwner<'tcx, K, I>
where where
K: Eq + Hash + Copy, K: Eq + Hash + Copy,
{ {
@ -222,10 +222,19 @@ where
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct CycleError { pub struct CycleError<I = QueryStackFrameExtra> {
/// The query and related span that uses the cycle. /// The query and related span that uses the cycle.
pub usage: Option<(Span, QueryStackFrame)>, pub usage: Option<(Span, QueryStackFrame<I>)>,
pub cycle: Vec<QueryInfo>, pub cycle: Vec<QueryInfo<I>>,
}
impl<I> CycleError<I> {
fn lift<Qcx: QueryContext<QueryInfo = I>>(&self, qcx: Qcx) -> CycleError<QueryStackFrameExtra> {
CycleError {
usage: self.usage.as_ref().map(|(span, frame)| (*span, frame.lift(qcx))),
cycle: self.cycle.iter().map(|info| info.lift(qcx)).collect(),
}
}
} }
/// Checks whether there is already a value for this key in the in-memory /// Checks whether there is already a value for this key in the in-memory
@ -262,10 +271,10 @@ where
{ {
// Ensure there was no errors collecting all active jobs. // Ensure there was no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break. // We need the complete map to ensure we find a cycle to break.
let query_map = qcx.collect_active_jobs().expect("failed to collect active queries"); let query_map = qcx.collect_active_jobs().ok().expect("failed to collect active queries");
let error = try_execute.find_cycle_in_stack(query_map, &qcx.current_query_job(), span); let error = try_execute.find_cycle_in_stack(query_map, &qcx.current_query_job(), span);
(mk_cycle(query, qcx, error), None) (mk_cycle(query, qcx, error.lift(qcx)), None)
} }
#[inline(always)] #[inline(always)]
@ -274,7 +283,7 @@ fn wait_for_query<Q, Qcx>(
qcx: Qcx, qcx: Qcx,
span: Span, span: Span,
key: Q::Key, key: Q::Key,
latch: QueryLatch, latch: QueryLatch<Qcx::QueryInfo>,
current: Option<QueryJobId>, current: Option<QueryJobId>,
) -> (Q::Value, Option<DepNodeIndex>) ) -> (Q::Value, Option<DepNodeIndex>)
where where
@ -314,7 +323,7 @@ where
(v, Some(index)) (v, Some(index))
} }
Err(cycle) => (mk_cycle(query, qcx, cycle), None), Err(cycle) => (mk_cycle(query, qcx, cycle.lift(qcx)), None),
} }
} }
@ -392,7 +401,7 @@ where
fn execute_job<Q, Qcx, const INCR: bool>( fn execute_job<Q, Qcx, const INCR: bool>(
query: Q, query: Q,
qcx: Qcx, qcx: Qcx,
state: &QueryState<Q::Key>, state: &QueryState<Q::Key, Qcx::QueryInfo>,
key: Q::Key, key: Q::Key,
key_hash: u64, key_hash: u64,
id: QueryJobId, id: QueryJobId,

View file

@ -19,18 +19,15 @@ pub(crate) fn collect_definitions(
fragment: &AstFragment, fragment: &AstFragment,
expansion: LocalExpnId, expansion: LocalExpnId,
) { ) {
let InvocationParent { parent_def, impl_trait_context, in_attr } = let invocation_parent = resolver.invocation_parents[&expansion];
resolver.invocation_parents[&expansion]; let mut visitor = DefCollector { resolver, expansion, invocation_parent };
let mut visitor = DefCollector { resolver, parent_def, expansion, impl_trait_context, in_attr };
fragment.visit_with(&mut visitor); fragment.visit_with(&mut visitor);
} }
/// Creates `DefId`s for nodes in the AST. /// Creates `DefId`s for nodes in the AST.
struct DefCollector<'a, 'ra, 'tcx> { struct DefCollector<'a, 'ra, 'tcx> {
resolver: &'a mut Resolver<'ra, 'tcx>, resolver: &'a mut Resolver<'ra, 'tcx>,
parent_def: LocalDefId, invocation_parent: InvocationParent,
impl_trait_context: ImplTraitContext,
in_attr: bool,
expansion: LocalExpnId, expansion: LocalExpnId,
} }
@ -42,7 +39,7 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> {
def_kind: DefKind, def_kind: DefKind,
span: Span, span: Span,
) -> LocalDefId { ) -> LocalDefId {
let parent_def = self.parent_def; let parent_def = self.invocation_parent.parent_def;
debug!( debug!(
"create_def(node_id={:?}, def_kind={:?}, parent_def={:?})", "create_def(node_id={:?}, def_kind={:?}, parent_def={:?})",
node_id, def_kind, parent_def node_id, def_kind, parent_def
@ -60,9 +57,9 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> {
} }
fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_def: LocalDefId, f: F) { fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_def: LocalDefId, f: F) {
let orig_parent_def = mem::replace(&mut self.parent_def, parent_def); let orig_parent_def = mem::replace(&mut self.invocation_parent.parent_def, parent_def);
f(self); f(self);
self.parent_def = orig_parent_def; self.invocation_parent.parent_def = orig_parent_def;
} }
fn with_impl_trait<F: FnOnce(&mut Self)>( fn with_impl_trait<F: FnOnce(&mut Self)>(
@ -70,9 +67,10 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> {
impl_trait_context: ImplTraitContext, impl_trait_context: ImplTraitContext,
f: F, f: F,
) { ) {
let orig_itc = mem::replace(&mut self.impl_trait_context, impl_trait_context); let orig_itc =
mem::replace(&mut self.invocation_parent.impl_trait_context, impl_trait_context);
f(self); f(self);
self.impl_trait_context = orig_itc; self.invocation_parent.impl_trait_context = orig_itc;
} }
fn collect_field(&mut self, field: &'a FieldDef, index: Option<usize>) { fn collect_field(&mut self, field: &'a FieldDef, index: Option<usize>) {
@ -96,14 +94,7 @@ impl<'a, 'ra, 'tcx> DefCollector<'a, 'ra, 'tcx> {
fn visit_macro_invoc(&mut self, id: NodeId) { fn visit_macro_invoc(&mut self, id: NodeId) {
let id = id.placeholder_to_expn_id(); let id = id.placeholder_to_expn_id();
let old_parent = self.resolver.invocation_parents.insert( let old_parent = self.resolver.invocation_parents.insert(id, self.invocation_parent);
id,
InvocationParent {
parent_def: self.parent_def,
impl_trait_context: self.impl_trait_context,
in_attr: self.in_attr,
},
);
assert!(old_parent.is_none(), "parent `LocalDefId` is reset for an invocation"); assert!(old_parent.is_none(), "parent `LocalDefId` is reset for an invocation");
} }
} }
@ -367,7 +358,7 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> {
self.with_parent(def, |this| visit::walk_anon_const(this, constant)); self.with_parent(def, |this| visit::walk_anon_const(this, constant));
return; return;
} }
_ => self.parent_def, _ => self.invocation_parent.parent_def,
}; };
self.with_parent(parent_def, |this| visit::walk_expr(this, expr)) self.with_parent(parent_def, |this| visit::walk_expr(this, expr))
@ -382,13 +373,13 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> {
// output or built artifacts, so replace them here... // output or built artifacts, so replace them here...
// Perhaps we should instead format APITs more robustly. // Perhaps we should instead format APITs more robustly.
let name = Symbol::intern(&pprust::ty_to_string(ty).replace('\n', " ")); let name = Symbol::intern(&pprust::ty_to_string(ty).replace('\n', " "));
let kind = match self.impl_trait_context { let kind = match self.invocation_parent.impl_trait_context {
ImplTraitContext::Universal => DefKind::TyParam, ImplTraitContext::Universal => DefKind::TyParam,
ImplTraitContext::Existential => DefKind::OpaqueTy, ImplTraitContext::Existential => DefKind::OpaqueTy,
ImplTraitContext::InBinding => return visit::walk_ty(self, ty), ImplTraitContext::InBinding => return visit::walk_ty(self, ty),
}; };
let id = self.create_def(*id, Some(name), kind, ty.span); let id = self.create_def(*id, Some(name), kind, ty.span);
match self.impl_trait_context { match self.invocation_parent.impl_trait_context {
// Do not nest APIT, as we desugar them as `impl_trait: bounds`, // Do not nest APIT, as we desugar them as `impl_trait: bounds`,
// so the `impl_trait` node is not a parent to `bounds`. // so the `impl_trait` node is not a parent to `bounds`.
ImplTraitContext::Universal => visit::walk_ty(self, ty), ImplTraitContext::Universal => visit::walk_ty(self, ty),
@ -459,9 +450,9 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> {
} }
fn visit_attribute(&mut self, attr: &'a Attribute) -> Self::Result { fn visit_attribute(&mut self, attr: &'a Attribute) -> Self::Result {
let orig_in_attr = mem::replace(&mut self.in_attr, true); let orig_in_attr = mem::replace(&mut self.invocation_parent.in_attr, true);
visit::walk_attribute(self, attr); visit::walk_attribute(self, attr);
self.in_attr = orig_in_attr; self.invocation_parent.in_attr = orig_in_attr;
} }
fn visit_inline_asm(&mut self, asm: &'a InlineAsm) { fn visit_inline_asm(&mut self, asm: &'a InlineAsm) {

View file

@ -3329,34 +3329,44 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
}, },
|this| { |this| {
this.with_lifetime_rib( this.with_lifetime_rib(
LifetimeRibKind::StaticIfNoLifetimeInScope { // Until these are a hard error, we need to create them within the correct binder;
lint_id: item.id, // otherwise the lifetimes of this assoc const think they are lifetimes of the trait.
// In impls, it's not a hard error yet due to backcompat. LifetimeRibKind::AnonymousCreateParameter {
emit_lint: true, binder: item.id,
report_in_path: true,
}, },
|this| { |this| {
// If this is a trait impl, ensure the const this.with_lifetime_rib(
// exists in trait LifetimeRibKind::StaticIfNoLifetimeInScope {
this.check_trait_item( lint_id: item.id,
item.id, // In impls, it's not a hard error yet due to backcompat.
item.ident, emit_lint: true,
&item.kind, },
ValueNS, |this| {
item.span, // If this is a trait impl, ensure the const
seen_trait_items, // exists in trait
|i, s, c| ConstNotMemberOfTrait(i, s, c), this.check_trait_item(
); item.id,
item.ident,
&item.kind,
ValueNS,
item.span,
seen_trait_items,
|i, s, c| ConstNotMemberOfTrait(i, s, c),
);
this.visit_generics(generics); this.visit_generics(generics);
this.visit_ty(ty); this.visit_ty(ty);
if let Some(expr) = expr { if let Some(expr) = expr {
// We allow arbitrary const expressions inside of associated consts, // We allow arbitrary const expressions inside of associated consts,
// even if they are potentially not const evaluatable. // even if they are potentially not const evaluatable.
// //
// Type parameters can already be used and as associated consts are // Type parameters can already be used and as associated consts are
// not used as part of the type system, this is far less surprising. // not used as part of the type system, this is far less surprising.
this.resolve_const_body(expr, None); this.resolve_const_body(expr, None);
} }
},
)
}, },
); );
}, },
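For orientation, the kind of code this resolver change affects is exactly what the `elided-lifetime.rs` and `static-trait-impl.rs` UI tests further down in this commit exercise: an associated const in an impl whose type elides a lifetime. A minimal, compiling sketch (illustrative only, not from the PR):
trait Bar {
    const STATIC: &'static str;
}
struct Foo;
impl Bar for Foo {
    // Writing `const STATIC: &str = "";` here elides the lifetime; that is the pattern the UI
    // tests exercise. With this change the elided lifetime is created as a parameter within the
    // const's own binder rather than being attributed to the impl or trait (still a lint, not
    // yet a hard error). The explicit form below is the accepted spelling.
    const STATIC: &'static str = "";
}
fn main() {
    let _ = <Foo as Bar>::STATIC;
}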

View file

@ -2118,7 +2118,9 @@ symbols! {
type_changing_struct_update, type_changing_struct_update,
type_const, type_const,
type_id, type_id,
type_ir_infer_ctxt_like,
type_ir_inherent, type_ir_inherent,
type_ir_interner,
type_length_limit, type_length_limit,
type_macros, type_macros,
type_name, type_name,

View file

@ -16,7 +16,7 @@ use rustc_infer::traits::{Obligation, PolyTraitObligation, SelectionError};
use rustc_middle::ty::fast_reject::DeepRejectCtxt; use rustc_middle::ty::fast_reject::DeepRejectCtxt;
use rustc_middle::ty::{self, Ty, TypeVisitableExt, TypingMode}; use rustc_middle::ty::{self, Ty, TypeVisitableExt, TypingMode};
use rustc_middle::{bug, span_bug}; use rustc_middle::{bug, span_bug};
use rustc_type_ir::{Interner, elaborate}; use rustc_type_ir::elaborate;
use tracing::{debug, instrument, trace}; use tracing::{debug, instrument, trace};
use super::SelectionCandidate::*; use super::SelectionCandidate::*;
@ -802,7 +802,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
| ty::UnsafeBinder(_) => { | ty::UnsafeBinder(_) => {
// Only consider auto impls of unsafe traits when there are // Only consider auto impls of unsafe traits when there are
// no unsafe fields. // no unsafe fields.
if self.tcx().trait_is_unsafe(def_id) && self_ty.has_unsafe_fields() { if self.tcx().trait_def(def_id).safety.is_unsafe()
&& self_ty.has_unsafe_fields()
{
return; return;
} }

View file

@ -102,6 +102,7 @@ impl<I: Interner> TypingMode<I> {
} }
} }
#[cfg_attr(feature = "nightly", rustc_diagnostic_item = "type_ir_infer_ctxt_like")]
pub trait InferCtxtLike: Sized { pub trait InferCtxtLike: Sized {
type Interner: Interner; type Interner: Interner;
fn cx(&self) -> Self::Interner; fn cx(&self) -> Self::Interner;

View file

@ -15,6 +15,7 @@ use crate::solve::{CanonicalInput, ExternalConstraintsData, PredefinedOpaquesDat
use crate::visit::{Flags, TypeSuperVisitable, TypeVisitable}; use crate::visit::{Flags, TypeSuperVisitable, TypeVisitable};
use crate::{self as ty, search_graph}; use crate::{self as ty, search_graph};
#[cfg_attr(feature = "nightly", rustc_diagnostic_item = "type_ir_interner")]
pub trait Interner: pub trait Interner:
Sized Sized
+ Copy + Copy

View file

@ -6,6 +6,7 @@
feature(associated_type_defaults, never_type, rustc_attrs, negative_impls) feature(associated_type_defaults, never_type, rustc_attrs, negative_impls)
)] )]
#![cfg_attr(feature = "nightly", allow(internal_features))] #![cfg_attr(feature = "nightly", allow(internal_features))]
#![cfg_attr(not(bootstrap), allow(rustc::usage_of_type_ir_traits))]
// tidy-alphabetical-end // tidy-alphabetical-end
extern crate self as rustc_type_ir; extern crate self as rustc_type_ir;

View file

@ -14,6 +14,22 @@
- [Deny-by-default Lints](lints/listing/deny-by-default.md) - [Deny-by-default Lints](lints/listing/deny-by-default.md)
- [JSON Output](json.md) - [JSON Output](json.md)
- [Tests](tests/index.md) - [Tests](tests/index.md)
- [Targets](targets/index.md)
- [Built-in Targets](targets/built-in.md)
- [Custom Targets](targets/custom.md)
- [Known Issues](targets/known-issues.md)
- [Profile-guided Optimization](profile-guided-optimization.md)
- [Instrumentation-based Code Coverage](instrument-coverage.md)
- [Linker-plugin-based LTO](linker-plugin-lto.md)
- [Checking Conditional Configurations](check-cfg.md)
- [Cargo Specifics](check-cfg/cargo-specifics.md)
- [Exploit Mitigations](exploit-mitigations.md)
- [Symbol Mangling](symbol-mangling/index.md)
- [v0 Symbol Format](symbol-mangling/v0.md)
- [Contributing to `rustc`](contributing.md)
--------
- [Platform Support](platform-support.md) - [Platform Support](platform-support.md)
- [Target Tier Policy](target-tier-policy.md) - [Target Tier Policy](target-tier-policy.md)
- [Template for Target-specific Documentation](platform-support/TEMPLATE.md) - [Template for Target-specific Documentation](platform-support/TEMPLATE.md)
@ -66,6 +82,7 @@
- [m68k-unknown-none-elf](platform-support/m68k-unknown-none-elf.md) - [m68k-unknown-none-elf](platform-support/m68k-unknown-none-elf.md)
- [mips64-openwrt-linux-musl](platform-support/mips64-openwrt-linux-musl.md) - [mips64-openwrt-linux-musl](platform-support/mips64-openwrt-linux-musl.md)
- [mipsel-sony-psx](platform-support/mipsel-sony-psx.md) - [mipsel-sony-psx](platform-support/mipsel-sony-psx.md)
- [mipsel-unknown-linux-gnu](platform-support/mipsel-unknown-linux-gnu.md)
- [mips\*-mti-none-elf](platform-support/mips-mti-none-elf.md) - [mips\*-mti-none-elf](platform-support/mips-mti-none-elf.md)
- [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md) - [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md)
- [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md) - [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md)
@ -114,16 +131,3 @@
- [x86_64-unknown-none](platform-support/x86_64-unknown-none.md) - [x86_64-unknown-none](platform-support/x86_64-unknown-none.md)
- [xtensa-\*-none-elf](platform-support/xtensa.md) - [xtensa-\*-none-elf](platform-support/xtensa.md)
- [\*-nuttx-\*](platform-support/nuttx.md) - [\*-nuttx-\*](platform-support/nuttx.md)
- [Targets](targets/index.md)
- [Built-in Targets](targets/built-in.md)
- [Custom Targets](targets/custom.md)
- [Known Issues](targets/known-issues.md)
- [Profile-guided Optimization](profile-guided-optimization.md)
- [Instrumentation-based Code Coverage](instrument-coverage.md)
- [Linker-plugin-based LTO](linker-plugin-lto.md)
- [Checking Conditional Configurations](check-cfg.md)
- [Cargo Specifics](check-cfg/cargo-specifics.md)
- [Exploit Mitigations](exploit-mitigations.md)
- [Symbol Mangling](symbol-mangling/index.md)
- [v0 Symbol Format](symbol-mangling/v0.md)
- [Contributing to `rustc`](contributing.md)

View file

@ -334,7 +334,7 @@ target | std | host | notes
`mips64el-unknown-linux-muslabi64` | ✓ | | MIPS64 (little endian) Linux, N64 ABI, musl 1.2.3 `mips64el-unknown-linux-muslabi64` | ✓ | | MIPS64 (little endian) Linux, N64 ABI, musl 1.2.3
`mipsel-sony-psp` | * | | MIPS (LE) Sony PlayStation Portable (PSP) `mipsel-sony-psp` | * | | MIPS (LE) Sony PlayStation Portable (PSP)
[`mipsel-sony-psx`](platform-support/mipsel-sony-psx.md) | * | | MIPS (LE) Sony PlayStation 1 (PSX) [`mipsel-sony-psx`](platform-support/mipsel-sony-psx.md) | * | | MIPS (LE) Sony PlayStation 1 (PSX)
`mipsel-unknown-linux-gnu` | ✓ | ✓ | MIPS (little endian) Linux (kernel 4.4, glibc 2.23) [`mipsel-unknown-linux-gnu`](platform-support/mipsel-unknown-linux-gnu.md) | ✓ | ✓ | MIPS (little endian) Linux (kernel 4.4, glibc 2.23)
`mipsel-unknown-linux-musl` | ✓ | | MIPS (little endian) Linux with musl 1.2.3 `mipsel-unknown-linux-musl` | ✓ | | MIPS (little endian) Linux with musl 1.2.3
`mipsel-unknown-linux-uclibc` | ✓ | | MIPS (LE) Linux with uClibc `mipsel-unknown-linux-uclibc` | ✓ | | MIPS (LE) Linux with uClibc
[`mipsel-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | 32-bit MIPS (LE), requires mips32 cpu support [`mipsel-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | 32-bit MIPS (LE), requires mips32 cpu support

View file

@ -0,0 +1,28 @@
# `mipsel-unknown-linux-gnu`
**Tier: 3**
Little-endian 32-bit MIPS for Linux with `glibc`.
## Target maintainers
- [@LukasWoodtli](https://github.com/LukasWoodtli)
## Requirements
The target supports std on Linux. Host tools are supported but not tested.
## Building the target
For cross-compilation, the GNU C compiler for the mipsel architecture needs to
be installed. On Ubuntu, install the packages `gcc-mipsel-linux-gnu` and
`g++-mipsel-linux-gnu`.
Add `mipsel-unknown-linux-gnu` to the `target` list in `config.toml`, for example as sketched below.
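A minimal sketch of the relevant `config.toml` entry (the host triple shown here is an assumption; keep whatever hosts and targets you already build for):
[build]
# Build the standard library for the host and for the new MIPS target (host triple is an example).
target = ["x86_64-unknown-linux-gnu", "mipsel-unknown-linux-gnu"]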
## Building Rust programs
Rust does not ship pre-compiled artifacts for this target. To compile for
this target, you will need to build Rust with the target enabled (see
"Building the target" above).

18 tests/incremental/env/env_macro.rs vendored Normal file
View file

@ -0,0 +1,18 @@
// Check that changes to environment variables are propagated to `env!`.
//
// This test is intentionally written to not use any `#[cfg(rpass*)]`, to
// _really_ test that we re-compile if the environment variable changes.
//@ revisions: cfail1 rpass2 rpass3 cfail4
//@ [cfail1]unset-rustc-env:EXAMPLE_ENV
//@ [rpass2]rustc-env:EXAMPLE_ENV=one
//@ [rpass2]exec-env:EXAMPLE_ENV=one
//@ [rpass3]rustc-env:EXAMPLE_ENV=two
//@ [rpass3]exec-env:EXAMPLE_ENV=two
//@ [cfail4]unset-rustc-env:EXAMPLE_ENV
fn main() {
assert_eq!(env!("EXAMPLE_ENV"), std::env::var("EXAMPLE_ENV").unwrap());
//[cfail1]~^ ERROR environment variable `EXAMPLE_ENV` not defined at compile time
//[cfail4]~^^ ERROR environment variable `EXAMPLE_ENV` not defined at compile time
}

View file

@ -0,0 +1,18 @@
// Check that changes to environment variables are propagated to `option_env!`.
//
// This test is intentionally written to not use any `#[cfg(rpass*)]`, to
// _really_ test that we re-compile if the environment variable changes.
//@ revisions: rpass1 rpass2 rpass3 rpass4
//@ [rpass1]unset-rustc-env:EXAMPLE_ENV
//@ [rpass1]unset-exec-env:EXAMPLE_ENV
//@ [rpass2]rustc-env:EXAMPLE_ENV=one
//@ [rpass2]exec-env:EXAMPLE_ENV=one
//@ [rpass3]rustc-env:EXAMPLE_ENV=two
//@ [rpass3]exec-env:EXAMPLE_ENV=two
//@ [rpass4]unset-rustc-env:EXAMPLE_ENV
//@ [rpass4]unset-exec-env:EXAMPLE_ENV
fn main() {
assert_eq!(option_env!("EXAMPLE_ENV"), std::env::var("EXAMPLE_ENV").ok().as_deref());
}

View file

@ -0,0 +1,16 @@
//@ compile-flags: -Z unstable-options
//@ ignore-stage1
#![feature(rustc_private)]
#![deny(rustc::usage_of_type_ir_traits)]
extern crate rustc_type_ir;
use rustc_type_ir::Interner;
fn foo<I: Interner>(cx: I, did: I::DefId) {
let _ = cx.trait_is_unsafe(did);
//~^ ERROR do not use `rustc_type_ir::Interner` or `rustc_type_ir::InferCtxtLike` unless you're inside of the trait solver
}
fn main() {}

View file

@ -0,0 +1,15 @@
error: do not use `rustc_type_ir::Interner` or `rustc_type_ir::InferCtxtLike` unless you're inside of the trait solver
--> $DIR/import-of-type-ir-traits.rs:12:13
|
LL | let _ = cx.trait_is_unsafe(did);
| ^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the method or struct you're looking for is likely defined somewhere else downstream in the compiler
note: the lint level is defined here
--> $DIR/import-of-type-ir-traits.rs:5:9
|
LL | #![deny(rustc::usage_of_type_ir_traits)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 1 previous error

View file

@ -0,0 +1,16 @@
//! Test that linking a no_std application still outputs the
//! `native-static-libs: ` note, even though it is empty.
//@ compile-flags: -Cpanic=abort --print=native-static-libs
//@ build-pass
//@ error-pattern: note: native-static-libs:
//@ dont-check-compiler-stderr (libcore links `/defaultlib:msvcrt` or `/defaultlib:libcmt` on MSVC)
//@ ignore-pass (the note is emitted later in the compilation pipeline, needs build)
#![crate_type = "staticlib"]
#![no_std]
#[panic_handler]
fn panic(_info: &core::panic::PanicInfo) -> ! {
loop {}
}

View file

@ -35,8 +35,6 @@ note: cannot automatically infer `'static` because of other lifetimes in scope
| |
LL | impl<'a> Foo<'a> { LL | impl<'a> Foo<'a> {
| ^^ | ^^
LL | const FOO: Foo<'_> = Foo { x: PhantomData::<&()> };
| ^^
help: use the `'static` lifetime help: use the `'static` lifetime
| |
LL | const BAR: &'static () = &(); LL | const BAR: &'static () = &();

View file

@ -16,7 +16,7 @@ impl Bar for Foo<'_> {
const STATIC: &str = ""; const STATIC: &str = "";
//~^ ERROR `&` without an explicit lifetime name cannot be used here //~^ ERROR `&` without an explicit lifetime name cannot be used here
//~| WARN this was previously accepted by the compiler but is being phased out //~| WARN this was previously accepted by the compiler but is being phased out
//~| ERROR const not compatible with trait //~| ERROR lifetime parameters or bounds on const `STATIC` do not match the trait declaration
} }
fn main() {} fn main() {}

View file

@ -39,21 +39,15 @@ help: use the `'static` lifetime
LL | const STATIC: &'static str = ""; LL | const STATIC: &'static str = "";
| +++++++ | +++++++
error[E0308]: const not compatible with trait error[E0195]: lifetime parameters or bounds on const `STATIC` do not match the trait declaration
--> $DIR/elided-lifetime.rs:16:5 --> $DIR/elided-lifetime.rs:16:17
| |
LL | const STATIC: &str;
| - lifetimes in impl do not match this const in trait
...
LL | const STATIC: &str = ""; LL | const STATIC: &str = "";
| ^^^^^^^^^^^^^^^^^^ lifetime mismatch | ^ lifetimes do not match const in trait
|
= note: expected reference `&'static _`
found reference `&_`
note: the anonymous lifetime as defined here...
--> $DIR/elided-lifetime.rs:16:19
|
LL | const STATIC: &str = "";
| ^
= note: ...does not necessarily outlive the static lifetime
error: aborting due to 3 previous errors error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0308`. For more information about this error, try `rustc --explain E0195`.

View file

@ -9,7 +9,7 @@ impl Bar<'_> for A {
const STATIC: &str = ""; const STATIC: &str = "";
//~^ ERROR `&` without an explicit lifetime name cannot be used here //~^ ERROR `&` without an explicit lifetime name cannot be used here
//~| WARN this was previously accepted by the compiler but is being phased out //~| WARN this was previously accepted by the compiler but is being phased out
//~| ERROR const not compatible with trait //~| ERROR lifetime parameters or bounds on const `STATIC` do not match the trait declaration
} }
struct B; struct B;

View file

@ -21,25 +21,15 @@ help: use the `'static` lifetime
LL | const STATIC: &'static str = ""; LL | const STATIC: &'static str = "";
| +++++++ | +++++++
error[E0308]: const not compatible with trait error[E0195]: lifetime parameters or bounds on const `STATIC` do not match the trait declaration
--> $DIR/static-trait-impl.rs:9:5 --> $DIR/static-trait-impl.rs:9:17
| |
LL | const STATIC: &'a str;
| - lifetimes in impl do not match this const in trait
...
LL | const STATIC: &str = ""; LL | const STATIC: &str = "";
| ^^^^^^^^^^^^^^^^^^ lifetime mismatch | ^ lifetimes do not match const in trait
|
= note: expected reference `&_`
found reference `&_`
note: the anonymous lifetime as defined here...
--> $DIR/static-trait-impl.rs:9:19
|
LL | const STATIC: &str = "";
| ^
note: ...does not necessarily outlive the anonymous lifetime as defined here
--> $DIR/static-trait-impl.rs:8:10
|
LL | impl Bar<'_> for A {
| ^^
error: aborting due to 2 previous errors error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0308`. For more information about this error, try `rustc --explain E0195`.

View file

@ -0,0 +1,24 @@
// Regression test for #138942, where a function was incorrectly internalized, despite the fact
// that it was referenced by a var debug info from another code generation unit.
//
//@ build-pass
//@ revisions: limited full
//@ compile-flags: -Ccodegen-units=4
//@[limited] compile-flags: -Cdebuginfo=limited
//@[full] compile-flags: -Cdebuginfo=full
trait Fun {
const FUN: &'static fn();
}
impl Fun for () {
const FUN: &'static fn() = &(detail::f as fn());
}
mod detail {
// Place `f` in a distinct module to generate a separate code generation unit.
#[inline(never)]
pub(super) fn f() {}
}
fn main() {
// SingleUseConsts represents "x" using VarDebugInfoContents::Const.
// It is the only reference to `f` remaining.
let x = <() as ::Fun>::FUN;
}

View file

@ -581,12 +581,12 @@ trigger_files = [
] ]
[notify-zulip."I-prioritize"] [notify-zulip."I-prioritize"]
zulip_stream = 245100 # #t-compiler/wg-prioritization/alerts zulip_stream = 245100 # #t-compiler/prioritization/alerts
topic = "#{number} {title}" topic = "#{number} {title}"
message_on_add = """\ message_on_add = """\
@*WG-prioritization/alerts* issue #{number} has been requested for prioritization. @*WG-prioritization/alerts* issue #{number} has been requested for prioritization.
# [Procedure](https://forge.rust-lang.org/compiler/prioritization/procedure.html#assign-priority-to-unprioritized-issues-with-i-prioritize-label) # [Procedure](https://forge.rust-lang.org/compiler/prioritization.html)
- Priority? - Priority?
- Regression? - Regression?
- Notify people/groups? - Notify people/groups?