Auto merge of #105220 - oli-obk:feeding, r=cjgillot

feed resolver_for_lowering instead of storing it in a field

r? `@cjgillot`

opening this as

* a discussion for `no_hash` + `feedable` queries. I think we'll want those, but I don't quite understand why they are rejected, beyond double-checking the stable hashes in situations where the query is fed but also read from incremental caches (a simplified sketch of the feeding idea follows below this list).
* a discussion on removing all untracked fields from `TyCtxt` and setting things up so that they are fed queries instead.
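
What follows is a minimal, self-contained sketch of the feeding idea, not rustc's query machinery: `FeedableQuery`, `feed`, and `get_or_compute` are names invented for this illustration. In the real change, the result is pushed into the query system through `tcx.feed_unit_query().resolver_for_lowering(...)`, as the diff below shows.

```rust
// Toy model of a "feedable" query: a cached result that can either be
// computed lazily by a provider or fed eagerly by code that already owns
// the value, instead of stashing that value in an untracked field.
use std::cell::OnceCell;

struct FeedableQuery<T> {
    cache: OnceCell<T>,
}

impl<T> FeedableQuery<T> {
    fn new() -> Self {
        Self { cache: OnceCell::new() }
    }

    /// Feed a precomputed result; feeding twice (or after a compute) is a bug.
    fn feed(&self, value: T) {
        if self.cache.set(value).is_err() {
            panic!("query result was already computed or fed");
        }
    }

    /// Read the result, running the provider only if nothing was fed.
    fn get_or_compute(&self, provider: impl FnOnce() -> T) -> &T {
        self.cache.get_or_init(provider)
    }
}

fn main() {
    let resolver_for_lowering = FeedableQuery::new();
    // The driver already has the resolver outputs in hand, so it feeds them.
    resolver_for_lowering.feed(String::from("resolver outputs"));
    // Later consumers just read the query; the provider never runs.
    let v = resolver_for_lowering.get_or_compute(|| unreachable!("already fed"));
    println!("{v}");
}
```

The `no_hash` question from the first bullet is orthogonal to this toy: it only concerns how the real query system fingerprints a fed result when the same query can also be loaded from the incremental cache.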
bors 2022-12-06 03:47:41 +00:00
commit ed61c139c2
11 changed files with 86 additions and 57 deletions

compiler/rustc_interface/src/passes.rs

@@ -12,6 +12,7 @@ use rustc_ast::{self as ast, visit};
 use rustc_borrowck as mir_borrowck;
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::parallel;
+use rustc_data_structures::steal::Steal;
 use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
 use rustc_errors::{ErrorGuaranteed, PResult};
 use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
@@ -801,6 +802,12 @@ pub fn create_global_ctxt<'tcx>(
         TcxQueries::new(local_providers, extern_providers, query_result_on_disk_cache)
     });
 
+    let ty::ResolverOutputs {
+        definitions,
+        global_ctxt: untracked_resolutions,
+        ast_lowering: untracked_resolver_for_lowering,
+    } = resolver_outputs;
+
     let gcx = sess.time("setup_global_ctxt", || {
         global_ctxt.get_or_init(move || {
             TyCtxt::create_global_ctxt(
@@ -808,7 +815,8 @@ pub fn create_global_ctxt<'tcx>(
                 lint_store,
                 arena,
                 hir_arena,
-                resolver_outputs,
+                definitions,
+                untracked_resolutions,
                 krate,
                 dep_graph,
                 queries.on_disk_cache.as_ref().map(OnDiskCache::as_dyn),
@@ -820,7 +828,12 @@ pub fn create_global_ctxt<'tcx>(
         })
     });
 
-    QueryContext { gcx }
+    let mut qcx = QueryContext { gcx };
+    qcx.enter(|tcx| {
+        tcx.feed_unit_query()
+            .resolver_for_lowering(tcx.arena.alloc(Steal::new(untracked_resolver_for_lowering)))
+    });
+    qcx
 }
 
 /// Runs the resolution, type-checking, region checking and other
@@ -965,12 +978,10 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
 pub fn start_codegen<'tcx>(
     codegen_backend: &dyn CodegenBackend,
     tcx: TyCtxt<'tcx>,
-    outputs: &OutputFilenames,
 ) -> Box<dyn Any> {
     info!("Pre-codegen\n{:?}", tcx.debug_stats());
 
-    let (metadata, need_metadata_module) =
-        rustc_metadata::fs::encode_and_write_metadata(tcx, outputs);
+    let (metadata, need_metadata_module) = rustc_metadata::fs::encode_and_write_metadata(tcx);
 
     let codegen = tcx.sess.time("codegen_crate", move || {
         codegen_backend.codegen_crate(tcx, metadata, need_metadata_module)
@@ -986,7 +997,7 @@ pub fn start_codegen<'tcx>(
     info!("Post-codegen\n{:?}", tcx.debug_stats());
 
     if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
-        if let Err(error) = rustc_mir_transform::dump_mir::emit_mir(tcx, outputs) {
+        if let Err(error) = rustc_mir_transform::dump_mir::emit_mir(tcx) {
             tcx.sess.emit_err(CantEmitMIR { error });
             tcx.sess.abort_if_errors();
         }

compiler/rustc_interface/src/queries.rs

@@ -20,6 +20,7 @@ use rustc_span::symbol::sym;
 use std::any::Any;
 use std::cell::{Ref, RefCell, RefMut};
 use std::rc::Rc;
+use std::sync::Arc;
 
 /// Represent the result of a query.
 ///
@@ -214,7 +215,7 @@ impl<'tcx> Queries<'tcx> {
     pub fn global_ctxt(&'tcx self) -> Result<&Query<QueryContext<'tcx>>> {
         self.global_ctxt.compute(|| {
             let crate_name = self.crate_name()?.peek().clone();
-            let outputs = self.prepare_outputs()?.peek().clone();
+            let outputs = self.prepare_outputs()?.take();
             let dep_graph = self.dep_graph()?.peek().clone();
             let (krate, resolver, lint_store) = self.expansion()?.take();
             Ok(passes::create_global_ctxt(
@@ -235,7 +236,6 @@ impl<'tcx> Queries<'tcx> {
 
     pub fn ongoing_codegen(&'tcx self) -> Result<&Query<Box<dyn Any>>> {
         self.ongoing_codegen.compute(|| {
-            let outputs = self.prepare_outputs()?;
 
             self.global_ctxt()?.peek_mut().enter(|tcx| {
                 tcx.analysis(()).ok();
@@ -249,7 +249,7 @@ impl<'tcx> Queries<'tcx> {
                 // Hook for UI tests.
                 Self::check_for_rustc_errors_attr(tcx);
 
-                Ok(passes::start_codegen(&***self.codegen_backend(), tcx, &*outputs.peek()))
+                Ok(passes::start_codegen(&***self.codegen_backend(), tcx))
             })
         })
     }
@@ -293,8 +293,10 @@ impl<'tcx> Queries<'tcx> {
         let codegen_backend = self.codegen_backend().clone();
 
         let dep_graph = self.dep_graph()?.peek().clone();
-        let prepare_outputs = self.prepare_outputs()?.take();
-        let crate_hash = self.global_ctxt()?.peek_mut().enter(|tcx| tcx.crate_hash(LOCAL_CRATE));
+        let (crate_hash, prepare_outputs) = self
+            .global_ctxt()?
+            .peek_mut()
+            .enter(|tcx| (tcx.crate_hash(LOCAL_CRATE), tcx.output_filenames(()).clone()));
         let ongoing_codegen = self.ongoing_codegen()?.take();
 
         Ok(Linker {
@@ -316,7 +318,7 @@ pub struct Linker {
 
     // compilation outputs
     dep_graph: DepGraph,
-    prepare_outputs: OutputFilenames,
+    prepare_outputs: Arc<OutputFilenames>,
    crate_hash: Svh,
    ongoing_codegen: Box<dyn Any>,
}