Remove all threading through of ErrorGuaranteed from the driver
Propagating ErrorGuaranteed through the driver was done inconsistently (sometimes even within a single function), and most of the rest of the compiler uses fatal errors instead, which have to be caught with catch_with_exit_code anyway. Using fatal errors instead of ErrorGuaranteed everywhere in the driver simplifies things a bit.
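For context, the two error-handling styles the message contrasts look roughly like this. This is a minimal, self-contained sketch: `FatalError`, `raise_fatal`, `catch_with_exit_code`, and `thread_through` are local stand-ins, not the compiler's real types.

```rust
// Style 1: thread an error token up through every caller.
// Each function returns Result<_, _> and callers must use `?`.
fn thread_through(had_error: bool) -> Result<u32, &'static str> {
    // `&'static str` stands in for ErrorGuaranteed here.
    if had_error { Err("error already reported") } else { Ok(42) }
}

// Style 2: raise a fatal error (a panic with a known payload) at the point of
// failure and let one top-level catch turn it into an exit code, similar in
// shape to what catch_with_exit_code does around the whole driver.
struct FatalError;

fn raise_fatal() -> ! {
    std::panic::panic_any(FatalError)
}

fn catch_with_exit_code(f: impl FnOnce() + std::panic::UnwindSafe) -> i32 {
    match std::panic::catch_unwind(f) {
        Ok(()) => 0,
        // A FatalError payload means "already reported, just exit non-zero".
        Err(payload) if payload.is::<FatalError>() => 1,
        Err(payload) => std::panic::resume_unwind(payload),
    }
}

fn main() {
    let code = catch_with_exit_code(|| {
        // No Result plumbing: any nested call may simply raise.
        if thread_through(true).is_err() {
            raise_fatal();
        }
    });
    std::process::exit(code);
}
```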
Commit 401dd840ff (parent 030545d8c3)
19 changed files with 175 additions and 216 deletions
@@ -36,7 +36,7 @@ use rustc_codegen_ssa::back::write::{
 use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen};
 use rustc_data_structures::fx::FxIndexMap;
-use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, FatalError};
+use rustc_errors::{DiagCtxtHandle, FatalError};
 use rustc_metadata::EncodedMetadata;
 use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
 use rustc_middle::ty::TyCtxt;
@@ -370,19 +370,14 @@ impl CodegenBackend for LlvmCodegenBackend {
         (codegen_results, work_products)
     }
 
-    fn link(
-        &self,
-        sess: &Session,
-        codegen_results: CodegenResults,
-        outputs: &OutputFilenames,
-    ) -> Result<(), ErrorGuaranteed> {
+    fn link(&self, sess: &Session, codegen_results: CodegenResults, outputs: &OutputFilenames) {
         use rustc_codegen_ssa::back::link::link_binary;
 
         use crate::back::archive::LlvmArchiveBuilderBuilder;
 
         // Run the linker on any artifacts that resulted from the LLVM run.
         // This should produce either a finished executable or library.
-        link_binary(sess, &LlvmArchiveBuilderBuilder, codegen_results, outputs)
+        link_binary(sess, &LlvmArchiveBuilderBuilder, codegen_results, outputs);
     }
 }
 
@@ -15,7 +15,7 @@ use rustc_ast::CRATE_NODE_ID;
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_data_structures::memmap::Mmap;
 use rustc_data_structures::temp_dir::MaybeTempDir;
-use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, FatalError};
+use rustc_errors::{DiagCtxtHandle, FatalError};
 use rustc_fs_util::{fix_windows_verbatim_for_gcc, try_canonicalize};
 use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc_metadata::fs::{METADATA_FILENAME, copy_to_stdout, emit_wrapper_file};
@@ -71,7 +71,7 @@ pub fn link_binary(
     archive_builder_builder: &dyn ArchiveBuilderBuilder,
     codegen_results: CodegenResults,
     outputs: &OutputFilenames,
-) -> Result<(), ErrorGuaranteed> {
+) {
     let _timer = sess.timer("link_binary");
     let output_metadata = sess.opts.output_types.contains_key(&OutputType::Metadata);
     let mut tempfiles_for_stdout_output: Vec<PathBuf> = Vec::new();
@@ -119,7 +119,7 @@ pub fn link_binary(
                     &codegen_results,
                     RlibFlavor::Normal,
                     &path,
-                )?
+                )
                 .build(&out_filename);
             }
             CrateType::Staticlib => {
@@ -129,7 +129,7 @@ pub fn link_binary(
                     &codegen_results,
                     &out_filename,
                     &path,
-                )?;
+                );
             }
             _ => {
                 link_natively(
@@ -139,7 +139,7 @@ pub fn link_binary(
                     &out_filename,
                     &codegen_results,
                     path.as_ref(),
-                )?;
+                );
            }
        }
        if sess.opts.json_artifact_notifications {
@@ -225,8 +225,6 @@ pub fn link_binary(
             maybe_remove_temps_from_module(preserve_objects, preserve_dwarf_objects, module);
         }
     });
-
-    Ok(())
 }
 
 // Crate type is not passed when calculating the dylibs to include for LTO. In that case all
@@ -298,7 +296,7 @@ fn link_rlib<'a>(
     codegen_results: &CodegenResults,
     flavor: RlibFlavor,
     tmpdir: &MaybeTempDir,
-) -> Result<Box<dyn ArchiveBuilder + 'a>, ErrorGuaranteed> {
+) -> Box<dyn ArchiveBuilder + 'a> {
     let mut ab = archive_builder_builder.new_archive_builder(sess);
 
     let trailing_metadata = match flavor {
@@ -374,7 +372,7 @@ fn link_rlib<'a>(
         {
             let path = find_native_static_library(filename.as_str(), true, sess);
             let src = read(path)
-                .map_err(|e| sess.dcx().emit_fatal(errors::ReadFileError { message: e }))?;
+                .unwrap_or_else(|e| sess.dcx().emit_fatal(errors::ReadFileError { message: e }));
             let (data, _) = create_wrapper_file(sess, ".bundled_lib".to_string(), &src);
             let wrapper_file = emit_wrapper_file(sess, &data, tmpdir, filename.as_str());
             packed_bundled_libs.push(wrapper_file);
@@ -392,7 +390,7 @@ fn link_rlib<'a>(
         codegen_results.crate_info.used_libraries.iter(),
         tmpdir.as_ref(),
         true,
-    )? {
+    ) {
         ab.add_archive(&output_path, Box::new(|_| false)).unwrap_or_else(|error| {
             sess.dcx().emit_fatal(errors::AddNativeLibrary { library_path: output_path, error });
         });
@@ -433,7 +431,7 @@ fn link_rlib<'a>(
         ab.add_file(&lib)
     }
 
-    Ok(ab)
+    ab
 }
 
 /// Extract all symbols defined in raw-dylib libraries, collated by library name.
@@ -445,7 +443,7 @@ fn link_rlib<'a>(
 fn collate_raw_dylibs<'a>(
     sess: &Session,
     used_libraries: impl IntoIterator<Item = &'a NativeLib>,
-) -> Result<Vec<(String, Vec<DllImport>)>, ErrorGuaranteed> {
+) -> Vec<(String, Vec<DllImport>)> {
     // Use index maps to preserve original order of imports and libraries.
     let mut dylib_table = FxIndexMap::<String, FxIndexMap<Symbol, &DllImport>>::default();
 
@@ -469,15 +467,13 @@ fn collate_raw_dylibs<'a>(
             }
         }
     }
-    if let Some(guar) = sess.dcx().has_errors() {
-        return Err(guar);
-    }
-    Ok(dylib_table
+    sess.dcx().abort_if_errors();
+    dylib_table
         .into_iter()
         .map(|(name, imports)| {
             (name, imports.into_iter().map(|(_, import)| import.clone()).collect())
         })
-        .collect())
+        .collect()
 }
 
 fn create_dll_import_libs<'a>(
@@ -486,8 +482,8 @@ fn create_dll_import_libs<'a>(
     used_libraries: impl IntoIterator<Item = &'a NativeLib>,
     tmpdir: &Path,
     is_direct_dependency: bool,
-) -> Result<Vec<PathBuf>, ErrorGuaranteed> {
-    Ok(collate_raw_dylibs(sess, used_libraries)?
+) -> Vec<PathBuf> {
+    collate_raw_dylibs(sess, used_libraries)
         .into_iter()
         .map(|(raw_dylib_name, raw_dylib_imports)| {
             let name_suffix = if is_direct_dependency { "_imports" } else { "_imports_indirect" };
@@ -537,7 +533,7 @@ fn create_dll_import_libs<'a>(
 
             output_path
         })
-        .collect())
+        .collect()
 }
 
 /// Create a static archive.
@@ -557,7 +553,7 @@ fn link_staticlib(
     codegen_results: &CodegenResults,
     out_filename: &Path,
     tempdir: &MaybeTempDir,
-) -> Result<(), ErrorGuaranteed> {
+) {
     info!("preparing staticlib to {:?}", out_filename);
     let mut ab = link_rlib(
         sess,
@@ -565,7 +561,7 @@ fn link_staticlib(
         codegen_results,
         RlibFlavor::StaticlibBase,
         tempdir,
-    )?;
+    );
     let mut all_native_libs = vec![];
 
     let res = each_linked_rlib(
@@ -656,8 +652,6 @@ fn link_staticlib(
             print_native_static_libs(sess, &print.out, &all_native_libs, &all_rust_dylibs);
         }
     }
-
-    Ok(())
 }
 
 /// Use `thorin` (rust implementation of a dwarf packaging utility) to link DWARF objects into a
@@ -773,7 +767,7 @@ fn link_natively(
     out_filename: &Path,
     codegen_results: &CodegenResults,
    tmpdir: &Path,
-) -> Result<(), ErrorGuaranteed> {
+) {
     info!("preparing {:?} to {:?}", crate_type, out_filename);
     let (linker_path, flavor) = linker_and_flavor(sess);
     let self_contained_components = self_contained_components(sess, crate_type);
@@ -797,7 +791,7 @@ fn link_natively(
         temp_filename,
         codegen_results,
         self_contained_components,
-    )?;
+    );
 
     linker::disable_localization(&mut cmd);
 
@@ -1177,8 +1171,6 @@ fn link_natively(
         ab.add_file(temp_filename);
         ab.build(out_filename);
     }
-
-    Ok(())
 }
 
 fn strip_symbols_with_external_utility(
@@ -2232,7 +2224,7 @@ fn linker_with_args(
     out_filename: &Path,
     codegen_results: &CodegenResults,
     self_contained_components: LinkSelfContainedComponents,
-) -> Result<Command, ErrorGuaranteed> {
+) -> Command {
     let self_contained_crt_objects = self_contained_components.is_crt_objects_enabled();
     let cmd = &mut *super::linker::get_linker(
         sess,
@@ -2356,7 +2348,7 @@ fn linker_with_args(
         codegen_results.crate_info.used_libraries.iter(),
         tmpdir,
         true,
-    )? {
+    ) {
         cmd.add_object(&output_path);
     }
     // As with add_upstream_native_libraries, we need to add the upstream raw-dylib symbols in case
@@ -2388,7 +2380,7 @@ fn linker_with_args(
         native_libraries_from_nonstatics,
         tmpdir,
         false,
-    )? {
+    ) {
         cmd.add_object(&output_path);
     }
 
@@ -2435,7 +2427,7 @@ fn linker_with_args(
     // to it and remove the option. Currently the last holdout is wasm32-unknown-emscripten.
     add_post_link_args(cmd, sess, flavor);
 
-    Ok(cmd.take_cmd())
+    cmd.take_cmd()
 }
 
 fn add_order_independent_options(
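Many of the hunks above swap `emit_err` (which returns an `ErrorGuaranteed` the caller has to propagate) for `emit_fatal` (which diverges), so `.map_err(...)` plus `?` becomes `.unwrap_or_else(...)`. A minimal sketch of why the two combinators differ, using a local stand-in `Diag` type rather than the real `DiagCtxt` API:

```rust
struct Diag;
struct ErrorToken(());

impl Diag {
    // Returns a token proving an error was emitted; the caller decides how to
    // propagate it (mirrors the shape of emit_err -> ErrorGuaranteed).
    fn emit_err(&self, msg: &str) -> ErrorToken {
        eprintln!("error: {msg}");
        ErrorToken(())
    }

    // Never returns: reporting the error also aborts the current operation
    // (mirrors the shape of emit_fatal; exiting stands in for raising FatalError).
    fn emit_fatal(&self, msg: &str) -> ! {
        eprintln!("error: {msg}");
        std::process::exit(1)
    }
}

fn read_file_old(dcx: &Diag, path: &str) -> Result<Vec<u8>, ErrorToken> {
    // Old style: convert the io::Error into a token and let the caller use `?`.
    std::fs::read(path).map_err(|e| dcx.emit_err(&format!("cannot read {path}: {e}")))
}

fn read_file_new(dcx: &Diag, path: &str) -> Vec<u8> {
    // New style: the error is fatal at the point of failure, so the function
    // returns the value directly and callers lose their `?`.
    std::fs::read(path).unwrap_or_else(|e| dcx.emit_fatal(&format!("cannot read {path}: {e}")))
}
```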
@@ -4,7 +4,6 @@ use std::hash::Hash;
 use rustc_ast::expand::allocator::AllocatorKind;
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::sync::{DynSend, DynSync};
-use rustc_errors::ErrorGuaranteed;
 use rustc_metadata::EncodedMetadata;
 use rustc_metadata::creader::MetadataLoaderDyn;
 use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
@@ -84,13 +83,8 @@ pub trait CodegenBackend {
     ) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>);
 
     /// This is called on the returned [`CodegenResults`] from [`join_codegen`](Self::join_codegen).
-    fn link(
-        &self,
-        sess: &Session,
-        codegen_results: CodegenResults,
-        outputs: &OutputFilenames,
-    ) -> Result<(), ErrorGuaranteed> {
-        link_binary(sess, &ArArchiveBuilderBuilder, codegen_results, outputs)
+    fn link(&self, sess: &Session, codegen_results: CodegenResults, outputs: &OutputFilenames) {
+        link_binary(sess, &ArArchiveBuilderBuilder, codegen_results, outputs);
     }
 
     /// Returns `true` if this backend can be safely called from multiple threads.
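Abstractly, the trait change here is the same one applied to the LLVM backend earlier: the provided `link` method stops returning a `Result`, so implementors report failures as (fatal) diagnostics and callers have nothing to propagate. A simplified sketch of the shape change, not the real trait:

```rust
// Stand-in for rustc_errors::ErrorGuaranteed in this sketch.
struct ErrorGuaranteedLike(());

// Before: every caller of link() had to forward the token with `?`.
trait CodegenBackendOld {
    fn link(&self) -> Result<(), ErrorGuaranteedLike>;
}

// After: errors inside link() end compilation on the spot, so the method
// can return unit and call sites become plain statements.
trait CodegenBackendNew {
    fn link(&self);
}
```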
@@ -99,10 +99,7 @@ impl Expander {
 /// If this function is intended to be used with command line arguments,
 /// `argv[0]` must be removed prior to calling it manually.
 #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
-pub fn arg_expand_all(
-    early_dcx: &EarlyDiagCtxt,
-    at_args: &[String],
-) -> Result<Vec<String>, ErrorGuaranteed> {
+pub fn arg_expand_all(early_dcx: &EarlyDiagCtxt, at_args: &[String]) -> Vec<String> {
     let mut expander = Expander::default();
     let mut result = Ok(());
     for arg in at_args {
@@ -110,7 +107,10 @@ pub fn arg_expand_all(
             result = Err(early_dcx.early_err(format!("failed to load argument file: {err}")));
         }
     }
-    result.map(|()| expander.finish())
+    if let Err(guar) = result {
+        guar.raise_fatal();
+    }
+    expander.finish()
 }
 
 /// Gets the raw unprocessed command-line arguments as Unicode strings, without doing any further
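`arg_expand_all` keeps its accumulate-errors-then-fail shape, but the failure is now converted to a fatal error at the end instead of being returned. Schematically, with hypothetical `expand`/`report` helpers standing in for the real `Expander` and `EarlyDiagCtxt`:

```rust
fn expand_all(args: &[String]) -> Vec<String> {
    let mut out = Vec::new();
    let mut failed = false;
    for arg in args {
        match expand(arg) {
            Ok(mut expanded) => out.append(&mut expanded),
            Err(err) => {
                // Report every bad argument, but keep going so the user sees
                // all of them in one run.
                report(&format!("failed to load argument file: {err}"));
                failed = true;
            }
        }
    }
    if failed {
        // Equivalent of `guar.raise_fatal()`: stop here instead of returning
        // a Result for the caller to unwrap.
        std::process::exit(1);
    }
    out
}

fn expand(arg: &str) -> Result<Vec<String>, String> {
    // Hypothetical stand-in: treat "@file" arguments as one-arg-per-line files.
    if let Some(path) = arg.strip_prefix('@') {
        std::fs::read_to_string(path)
            .map(|s| s.lines().map(str::to_owned).collect())
            .map_err(|e| e.to_string())
    } else {
        Ok(vec![arg.to_owned()])
    }
}

fn report(msg: &str) {
    eprintln!("error: {msg}");
}
```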
@@ -42,9 +42,7 @@ use rustc_data_structures::profiling::{
 };
 use rustc_errors::emitter::stderr_destination;
 use rustc_errors::registry::Registry;
-use rustc_errors::{
-    ColorConfig, DiagCtxt, ErrCode, ErrorGuaranteed, FatalError, PResult, markdown,
-};
+use rustc_errors::{ColorConfig, DiagCtxt, ErrCode, FatalError, PResult, markdown};
 use rustc_feature::find_gated_cfg;
 use rustc_interface::util::{self, get_codegen_backend};
 use rustc_interface::{Linker, Queries, interface, passes};
@@ -271,14 +269,14 @@ impl<'a> RunCompiler<'a> {
     }
 
     /// Parse args and run the compiler.
-    pub fn run(self) -> interface::Result<()> {
+    pub fn run(self) {
         run_compiler(
             self.at_args,
             self.callbacks,
             self.file_loader,
             self.make_codegen_backend,
             self.using_internal_features,
-        )
+        );
     }
 }
 
@@ -290,7 +288,7 @@ fn run_compiler(
         Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
     >,
     using_internal_features: Arc<std::sync::atomic::AtomicBool>,
-) -> interface::Result<()> {
+) {
     let mut default_early_dcx = EarlyDiagCtxt::new(ErrorOutputType::default());
 
     // Throw away the first argument, the name of the binary.
@@ -303,9 +301,11 @@ fn run_compiler(
    // the compiler with @empty_file as argv[0] and no more arguments.
    let at_args = at_args.get(1..).unwrap_or_default();
 
-    let args = args::arg_expand_all(&default_early_dcx, at_args)?;
+    let args = args::arg_expand_all(&default_early_dcx, at_args);
 
-    let Some(matches) = handle_options(&default_early_dcx, &args) else { return Ok(()) };
+    let Some(matches) = handle_options(&default_early_dcx, &args) else {
+        return;
+    };
 
     let sopts = config::build_session_options(&mut default_early_dcx, &matches);
     // fully initialize ice path static once unstable options are available as context
@@ -313,7 +313,7 @@ fn run_compiler(
 
     if let Some(ref code) = matches.opt_str("explain") {
         handle_explain(&default_early_dcx, diagnostics_registry(), code, sopts.color);
-        return Ok(());
+        return;
     }
 
     let (odir, ofile) = make_output(&matches);
@@ -338,7 +338,7 @@ fn run_compiler(
         expanded_args: args,
     };
 
-    let has_input = match make_input(&default_early_dcx, &matches.free)? {
+    let has_input = match make_input(&default_early_dcx, &matches.free) {
         Some(input) => {
            config.input = input;
            true // has input: normal compilation
@@ -358,7 +358,7 @@ fn run_compiler(
         // printing some information without compiling, or exiting immediately
         // after parsing, etc.
         let early_exit = || {
-            if let Some(guar) = sess.dcx().has_errors() { Err(guar) } else { Ok(()) }
+            sess.dcx().abort_if_errors();
         };
 
         // This implements `-Whelp`. It should be handled very early, like
@@ -389,22 +389,25 @@ fn run_compiler(
        }
 
        let linker = compiler.enter(|queries| {
-            let early_exit = || early_exit().map(|_| None);
+            let early_exit = || {
+                sess.dcx().abort_if_errors();
+                None
+            };
 
            // Parse the crate root source code (doesn't parse submodules yet)
            // Everything else is parsed during macro expansion.
-            queries.parse()?;
+            queries.parse();
 
            // If pretty printing is requested: Figure out the representation, print it and exit
            if let Some(pp_mode) = sess.opts.pretty {
                if pp_mode.needs_ast_map() {
-                    queries.global_ctxt()?.enter(|tcx| {
+                    queries.global_ctxt().enter(|tcx| {
                        tcx.ensure().early_lint_checks(());
                        pretty::print(sess, pp_mode, pretty::PrintExtra::NeedsAstMap { tcx });
                        passes::write_dep_info(tcx);
                    });
                } else {
-                    let krate = queries.parse()?;
+                    let krate = queries.parse();
                    pretty::print(sess, pp_mode, pretty::PrintExtra::AfterParsing {
                        krate: &*krate.borrow(),
                    });
@@ -423,17 +426,17 @@ fn run_compiler(
            }
 
            // Make sure name resolution and macro expansion is run.
-            queries.global_ctxt()?.enter(|tcx| tcx.resolver_for_lowering());
+            queries.global_ctxt().enter(|tcx| tcx.resolver_for_lowering());
 
            if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir {
-                queries.global_ctxt()?.enter(|tcxt| dump_feature_usage_metrics(tcxt, metrics_dir));
+                queries.global_ctxt().enter(|tcxt| dump_feature_usage_metrics(tcxt, metrics_dir));
            }
 
            if callbacks.after_expansion(compiler, queries) == Compilation::Stop {
                return early_exit();
            }
 
-            queries.global_ctxt()?.enter(|tcx| {
+            queries.global_ctxt().enter(|tcx| {
                passes::write_dep_info(tcx);
 
                if sess.opts.output_types.contains_key(&OutputType::DepInfo)
@@ -446,23 +449,21 @@ fn run_compiler(
                    return early_exit();
                }
 
-                tcx.analysis(())?;
+                let _ = tcx.analysis(());
 
                if callbacks.after_analysis(compiler, tcx) == Compilation::Stop {
                    return early_exit();
                }
 
-                Ok(Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend)?))
+                Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend))
            })
-        })?;
+        });
 
        // Linking is done outside the `compiler.enter()` so that the
        // `GlobalCtxt` within `Queries` can be freed as early as possible.
        if let Some(linker) = linker {
-            linker.link(sess, codegen_backend)?;
+            linker.link(sess, codegen_backend);
        }
-
-        Ok(())
    })
 }
 
@@ -495,21 +496,17 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<OutFileNa
 
 /// Extract input (string or file and optional path) from matches.
 /// This handles reading from stdin if `-` is provided.
-fn make_input(
-    early_dcx: &EarlyDiagCtxt,
-    free_matches: &[String],
-) -> Result<Option<Input>, ErrorGuaranteed> {
+fn make_input(early_dcx: &EarlyDiagCtxt, free_matches: &[String]) -> Option<Input> {
     match free_matches {
-        [] => Ok(None), // no input: we will exit early,
+        [] => None, // no input: we will exit early,
         [ifile] if ifile == "-" => {
             // read from stdin as `Input::Str`
             let mut input = String::new();
             if io::stdin().read_to_string(&mut input).is_err() {
                 // Immediately stop compilation if there was an issue reading
                 // the input (for example if the input stream is not UTF-8).
-                let reported = early_dcx
-                    .early_err("couldn't read from stdin, as it did not contain valid UTF-8");
-                return Err(reported);
+                early_dcx
+                    .early_fatal("couldn't read from stdin, as it did not contain valid UTF-8");
             }
 
             let name = match env::var("UNSTABLE_RUSTDOC_TEST_PATH") {
@@ -525,9 +522,9 @@ fn make_input(
                 Err(_) => FileName::anon_source_code(&input),
             };
 
-            Ok(Some(Input::Str { name, input }))
+            Some(Input::Str { name, input })
         }
-        [ifile] => Ok(Some(Input::File(PathBuf::from(ifile)))),
+        [ifile] => Some(Input::File(PathBuf::from(ifile))),
         [ifile1, ifile2, ..] => early_dcx.early_fatal(format!(
             "multiple input filenames provided (first two filenames are `{}` and `{}`)",
             ifile1, ifile2
@@ -662,9 +659,7 @@ fn process_rlink(sess: &Session, compiler: &interface::Compiler) {
                };
            }
        };
-        if compiler.codegen_backend.link(sess, codegen_results, &outputs).is_err() {
-            FatalError.raise();
-        }
+        compiler.codegen_backend.link(sess, codegen_results, &outputs);
    } else {
        dcx.emit_fatal(RlinkNotAFile {});
    }
@@ -1607,7 +1602,8 @@ pub fn main() -> ! {
    let exit_code = catch_with_exit_code(|| {
        RunCompiler::new(&args::raw_args(&early_dcx)?, &mut callbacks)
            .set_using_internal_features(using_internal_features)
-            .run()
+            .run();
+        Ok(())
    });
 
    if let Some(format) = callbacks.time_passes {
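Several hunks in this file replace the manual `if let Some(guar) = dcx.has_errors() { return Err(guar) }` dance with `dcx.abort_if_errors()`. The two are interchangeable at the driver level because aborting on reported errors ultimately takes the same fatal path that `catch_with_exit_code` already handles. A rough model of the helper, not the real `DiagCtxt` implementation:

```rust
struct ErrorGuaranteedLike(());

struct DiagCtxt {
    error_count: usize,
}

impl DiagCtxt {
    fn has_errors(&self) -> Option<ErrorGuaranteedLike> {
        (self.error_count > 0).then_some(ErrorGuaranteedLike(()))
    }

    // If anything was reported, stop compilation right here instead of handing
    // an ErrorGuaranteed back to the caller to thread through Results.
    fn abort_if_errors(&self) {
        if self.has_errors().is_some() {
            // The real implementation raises FatalError; exiting is the
            // observable effect once the top-level catch unwinds it.
            std::process::exit(1);
        }
    }
}
```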
@@ -222,8 +222,8 @@ impl<'tcx> PrintExtra<'tcx> {
 }
 
 pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) {
-    if ppm.needs_analysis() && ex.tcx().analysis(()).is_err() {
-        FatalError.raise();
+    if ppm.needs_analysis() {
+        let _ = ex.tcx().analysis(());
     }
 
     let (src, src_name) = get_source(sess);
@@ -114,7 +114,6 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
 use rustc_data_structures::svh::Svh;
 use rustc_data_structures::unord::{UnordMap, UnordSet};
 use rustc_data_structures::{base_n, flock};
-use rustc_errors::ErrorGuaranteed;
 use rustc_fs_util::{LinkOrCopy, link_or_copy, try_canonicalize};
 use rustc_middle::bug;
 use rustc_session::config::CrateType;
@@ -212,9 +211,9 @@ pub fn in_incr_comp_dir(incr_comp_session_dir: &Path, file_name: &str) -> PathBu
 /// The garbage collection will take care of it.
 ///
 /// [`rustc_interface::queries::dep_graph`]: ../../rustc_interface/struct.Queries.html#structfield.dep_graph
-pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuaranteed> {
+pub(crate) fn prepare_session_directory(sess: &Session) {
     if sess.opts.incremental.is_none() {
-        return Ok(());
+        return;
     }
 
     let _timer = sess.timer("incr_comp_prepare_session_directory");
@@ -224,7 +223,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
     // {incr-comp-dir}/{crate-name-and-disambiguator}
     let crate_dir = crate_path(sess);
     debug!("crate-dir: {}", crate_dir.display());
-    create_dir(sess, &crate_dir, "crate")?;
+    create_dir(sess, &crate_dir, "crate");
 
     // Hack: canonicalize the path *after creating the directory*
     // because, on windows, long paths can cause problems;
@@ -233,7 +232,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
     let crate_dir = match try_canonicalize(&crate_dir) {
         Ok(v) => v,
         Err(err) => {
-            return Err(sess.dcx().emit_err(errors::CanonicalizePath { path: crate_dir, err }));
+            sess.dcx().emit_fatal(errors::CanonicalizePath { path: crate_dir, err });
         }
     };
 
@@ -248,11 +247,11 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
 
     // Lock the new session directory. If this fails, return an
     // error without retrying
-    let (directory_lock, lock_file_path) = lock_directory(sess, &session_dir)?;
+    let (directory_lock, lock_file_path) = lock_directory(sess, &session_dir);
 
     // Now that we have the lock, we can actually create the session
     // directory
-    create_dir(sess, &session_dir, "session")?;
+    create_dir(sess, &session_dir, "session");
 
     // Find a suitable source directory to copy from. Ignore those that we
     // have already tried before.
@@ -266,7 +265,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
        );
 
        sess.init_incr_comp_session(session_dir, directory_lock);
-        return Ok(());
+        return;
    };
 
    debug!("attempting to copy data from source: {}", source_directory.display());
@@ -280,7 +279,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
        }
 
        sess.init_incr_comp_session(session_dir, directory_lock);
-        return Ok(());
+        return;
    } else {
        debug!("copying failed - trying next directory");
 
@@ -459,21 +458,17 @@ fn generate_session_dir_path(crate_dir: &Path) -> PathBuf {
     directory_path
 }
 
-fn create_dir(sess: &Session, path: &Path, dir_tag: &str) -> Result<(), ErrorGuaranteed> {
+fn create_dir(sess: &Session, path: &Path, dir_tag: &str) {
     match std_fs::create_dir_all(path) {
         Ok(()) => {
             debug!("{} directory created successfully", dir_tag);
-            Ok(())
         }
-        Err(err) => Err(sess.dcx().emit_err(errors::CreateIncrCompDir { tag: dir_tag, path, err })),
+        Err(err) => sess.dcx().emit_fatal(errors::CreateIncrCompDir { tag: dir_tag, path, err }),
     }
 }
 
 /// Allocate the lock-file and lock it.
-fn lock_directory(
-    sess: &Session,
-    session_dir: &Path,
-) -> Result<(flock::Lock, PathBuf), ErrorGuaranteed> {
+fn lock_directory(sess: &Session, session_dir: &Path) -> (flock::Lock, PathBuf) {
     let lock_file_path = lock_file_path(session_dir);
     debug!("lock_directory() - lock_file: {}", lock_file_path.display());
 
@@ -484,15 +479,15 @@ fn lock_directory(
         true,
     ) {
         // the lock should be exclusive
-        Ok(lock) => Ok((lock, lock_file_path)),
+        Ok(lock) => (lock, lock_file_path),
         Err(lock_err) => {
             let is_unsupported_lock = flock::Lock::error_unsupported(&lock_err);
-            Err(sess.dcx().emit_err(errors::CreateLock {
+            sess.dcx().emit_fatal(errors::CreateLock {
                 lock_err,
                 session_dir,
                 is_unsupported_lock,
                 is_cargo: rustc_session::utils::was_invoked_from_cargo(),
-            }))
+            });
         }
     }
 }
 
@@ -11,7 +11,6 @@ use rustc_serialize::Decodable;
 use rustc_serialize::opaque::MemDecoder;
 use rustc_session::Session;
 use rustc_session::config::IncrementalStateAssertion;
-use rustc_span::ErrorGuaranteed;
 use tracing::{debug, warn};
 
 use super::data::*;
@@ -204,9 +203,9 @@ pub fn load_query_result_cache(sess: &Session) -> Option<OnDiskCache> {
 
 /// Setups the dependency graph by loading an existing graph from disk and set up streaming of a
 /// new graph to an incremental session directory.
-pub fn setup_dep_graph(sess: &Session) -> Result<DepGraph, ErrorGuaranteed> {
+pub fn setup_dep_graph(sess: &Session) -> DepGraph {
     // `load_dep_graph` can only be called after `prepare_session_directory`.
-    prepare_session_directory(sess)?;
+    prepare_session_directory(sess);
 
     let res = sess.opts.build_dep_graph().then(|| load_dep_graph(sess));
 
@@ -222,10 +221,9 @@ pub fn setup_dep_graph(sess: &Session) -> Result<DepGraph, ErrorGuaranteed> {
        });
    }
 
-    Ok(res
-        .and_then(|result| {
-            let (prev_graph, prev_work_products) = result.open(sess);
-            build_dep_graph(sess, prev_graph, prev_work_products)
-        })
-        .unwrap_or_else(DepGraph::new_disabled))
+    res.and_then(|result| {
+        let (prev_graph, prev_work_products) = result.open(sess);
+        build_dep_graph(sess, prev_graph, prev_work_products)
+    })
+    .unwrap_or_else(DepGraph::new_disabled)
 }
 
@@ -33,15 +33,15 @@ use rustc_session::output::{collect_crate_types, filename_for_input, find_crate_
 use rustc_session::search_paths::PathKind;
 use rustc_session::{Limit, Session};
 use rustc_span::symbol::{Symbol, sym};
-use rustc_span::{FileName, SourceFileHash, SourceFileHashAlgorithm};
+use rustc_span::{ErrorGuaranteed, FileName, SourceFileHash, SourceFileHashAlgorithm};
 use rustc_target::spec::PanicStrategy;
 use rustc_trait_selection::traits;
 use tracing::{info, instrument};
 
-use crate::interface::{Compiler, Result};
+use crate::interface::Compiler;
 use crate::{errors, proc_macro_decls, util};
 
-pub(crate) fn parse<'a>(sess: &'a Session) -> Result<ast::Crate> {
+pub(crate) fn parse<'a>(sess: &'a Session) -> ast::Crate {
     let krate = sess
         .time("parse_crate", || {
             let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
@@ -52,13 +52,16 @@ pub(crate) fn parse<'a>(sess: &'a Session) -> Result<ast::Crate> {
             });
             parser.parse_crate_mod()
         })
-        .map_err(|parse_error| parse_error.emit())?;
+        .unwrap_or_else(|parse_error| {
+            let guar: ErrorGuaranteed = parse_error.emit();
+            guar.raise_fatal();
+        });
 
     if sess.opts.unstable_opts.input_stats {
         input_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS", "ast-stats-1");
     }
 
-    Ok(krate)
+    krate
 }
 
 fn pre_expansion_lint<'a>(
@@ -712,7 +715,7 @@ pub(crate) fn create_global_ctxt<'tcx>(
     gcx_cell: &'tcx OnceLock<GlobalCtxt<'tcx>>,
     arena: &'tcx WorkerLocal<Arena<'tcx>>,
     hir_arena: &'tcx WorkerLocal<rustc_hir::Arena<'tcx>>,
-) -> Result<&'tcx GlobalCtxt<'tcx>> {
+) -> &'tcx GlobalCtxt<'tcx> {
     let sess = &compiler.sess;
 
     rustc_builtin_macros::cmdline_attrs::inject(
@@ -733,7 +736,7 @@ pub(crate) fn create_global_ctxt<'tcx>(
     sess.cfg_version,
     );
     let outputs = util::build_output_filenames(&pre_configured_attrs, sess);
-    let dep_graph = setup_dep_graph(sess)?;
+    let dep_graph = setup_dep_graph(sess);
 
     let cstore =
         FreezeLock::new(Box::new(CStore::new(compiler.codegen_backend.metadata_loader())) as _);
@@ -796,7 +799,7 @@ pub(crate) fn create_global_ctxt<'tcx>(
            feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs))));
            feed.output_filenames(Arc::new(outputs));
        });
-        Ok(qcx)
+        qcx
    })
 }
 
@@ -908,7 +911,7 @@ fn run_required_analyses(tcx: TyCtxt<'_>) {
 
 /// Runs the type-checking, region checking and other miscellaneous analysis
 /// passes on the crate.
-fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
+fn analysis(tcx: TyCtxt<'_>, (): ()) {
     run_required_analyses(tcx);
 
     let sess = tcx.sess;
@@ -922,7 +925,7 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
     // But we exclude lint errors from this, because lint errors are typically
     // less serious and we're more likely to want to continue (#87337).
     if let Some(guar) = sess.dcx().has_errors_excluding_lint_errors() {
-        return Err(guar);
+        guar.raise_fatal();
     }
 
     sess.time("misc_checking_3", || {
@@ -1050,8 +1053,6 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
            })
        }
    }
-
-    Ok(())
 }
 
 /// Check for the `#[rustc_error]` annotation, which forces an error in codegen. This is used
@@ -1093,12 +1094,12 @@ fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) {
 pub(crate) fn start_codegen<'tcx>(
     codegen_backend: &dyn CodegenBackend,
     tcx: TyCtxt<'tcx>,
-) -> Result<Box<dyn Any>> {
+) -> Box<dyn Any> {
     // Don't do code generation if there were any errors. Likewise if
     // there were any delayed bugs, because codegen will likely cause
     // more ICEs, obscuring the original problem.
     if let Some(guar) = tcx.sess.dcx().has_errors_or_delayed_bugs() {
-        return Err(guar);
+        guar.raise_fatal();
     }
 
     // Hook for UI tests.
@@ -1126,7 +1127,7 @@ pub(crate) fn start_codegen<'tcx>(
        }
    }
 
-    Ok(codegen)
+    codegen
 }
 
 fn get_recursion_limit(krate_attrs: &[ast::Attribute], sess: &Session) -> Limit {
@@ -16,7 +16,7 @@ use rustc_session::Session;
 use rustc_session::config::{self, OutputFilenames, OutputType};
 
 use crate::errors::FailedWritingFile;
-use crate::interface::{Compiler, Result};
+use crate::interface::Compiler;
 use crate::passes;
 
 /// Represent the result of a query.
@@ -27,19 +27,17 @@ use crate::passes;
 /// [`compute`]: Self::compute
 pub struct Query<T> {
     /// `None` means no value has been computed yet.
-    result: RefCell<Option<Result<Steal<T>>>>,
+    result: RefCell<Option<Steal<T>>>,
 }
 
 impl<T> Query<T> {
-    fn compute<F: FnOnce() -> Result<T>>(&self, f: F) -> Result<QueryResult<'_, T>> {
-        RefMut::filter_map(
+    fn compute<F: FnOnce() -> T>(&self, f: F) -> QueryResult<'_, T> {
+        QueryResult(RefMut::map(
             self.result.borrow_mut(),
-            |r: &mut Option<Result<Steal<T>>>| -> Option<&mut Steal<T>> {
-                r.get_or_insert_with(|| f().map(Steal::new)).as_mut().ok()
+            |r: &mut Option<Steal<T>>| -> &mut Steal<T> {
+                r.get_or_insert_with(|| Steal::new(f()))
             },
-        )
-        .map_err(|r| *r.as_ref().unwrap().as_ref().map(|_| ()).unwrap_err())
-        .map(QueryResult)
+        ))
     }
 }
 
@@ -95,13 +93,13 @@ impl<'tcx> Queries<'tcx> {
        }
    }
 
-    pub fn parse(&self) -> Result<QueryResult<'_, ast::Crate>> {
+    pub fn parse(&self) -> QueryResult<'_, ast::Crate> {
         self.parse.compute(|| passes::parse(&self.compiler.sess))
     }
 
-    pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'tcx, &'tcx GlobalCtxt<'tcx>>> {
+    pub fn global_ctxt(&'tcx self) -> QueryResult<'tcx, &'tcx GlobalCtxt<'tcx>> {
         self.gcx.compute(|| {
-            let krate = self.parse()?.steal();
+            let krate = self.parse().steal();
 
             passes::create_global_ctxt(
                 self.compiler,
@@ -126,8 +124,8 @@ impl Linker {
     pub fn codegen_and_build_linker(
         tcx: TyCtxt<'_>,
         codegen_backend: &dyn CodegenBackend,
-    ) -> Result<Linker> {
-        let ongoing_codegen = passes::start_codegen(codegen_backend, tcx)?;
+    ) -> Linker {
+        let ongoing_codegen = passes::start_codegen(codegen_backend, tcx);
 
         // This must run after monomorphization so that all generic types
         // have been instantiated.
@@ -141,7 +139,7 @@ impl Linker {
            tcx.sess.code_stats.print_vtable_sizes(crate_name);
        }
 
-        Ok(Linker {
+        Linker {
            dep_graph: tcx.dep_graph.clone(),
            output_filenames: Arc::clone(tcx.output_filenames(())),
            crate_hash: if tcx.needs_crate_hash() {
@@ -150,17 +148,15 @@ impl Linker {
                None
            },
            ongoing_codegen,
-        })
+        }
    }
 
-    pub fn link(self, sess: &Session, codegen_backend: &dyn CodegenBackend) -> Result<()> {
+    pub fn link(self, sess: &Session, codegen_backend: &dyn CodegenBackend) {
        let (codegen_results, work_products) = sess.time("finish_ongoing_codegen", || {
            codegen_backend.join_codegen(self.ongoing_codegen, sess, &self.output_filenames)
        });
 
-        if let Some(guar) = sess.dcx().has_errors() {
-            return Err(guar);
-        }
+        sess.dcx().abort_if_errors();
 
        let _timer = sess.timer("link");
 
@@ -181,7 +177,7 @@ impl Linker {
            .keys()
            .any(|&i| i == OutputType::Exe || i == OutputType::Metadata)
        {
-            return Ok(());
+            return;
        }
 
        if sess.opts.unstable_opts.no_link {
@@ -192,10 +188,10 @@ impl Linker {
                &codegen_results,
                &*self.output_filenames,
            )
-            .map_err(|error| {
+            .unwrap_or_else(|error| {
                sess.dcx().emit_fatal(FailedWritingFile { path: &rlink_file, error })
-            })?;
-            return Ok(());
+            });
+            return;
        }
 
        let _timer = sess.prof.verbose_generic_activity("link_crate");
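With the `Result` layer gone, `Query<T>` is plain lazy memoization: compute once, cache the value, and hand out a mutable borrow. A stripped-down model of the new `compute` (using a bare `T` instead of `Steal<T>` so the sketch stays self-contained):

```rust
use std::cell::{RefCell, RefMut};

struct Query<T> {
    /// `None` means no value has been computed yet.
    result: RefCell<Option<T>>,
}

impl<T> Query<T> {
    fn compute<F: FnOnce() -> T>(&self, f: F) -> RefMut<'_, T> {
        // RefMut::map projects the borrow of the Option into a borrow of the
        // value, inserting it on first use. Since `f` can no longer fail,
        // there is no Result to unwrap and no error to stash in the cell.
        RefMut::map(self.result.borrow_mut(), |r| r.get_or_insert_with(f))
    }
}

fn main() {
    let q = Query { result: RefCell::new(None) };
    let v = q.compute(|| 21 * 2);
    assert_eq!(*v, 42);
}
```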
|
@ -276,7 +276,7 @@ rustc_queries! {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The root query triggering all analysis passes like typeck or borrowck.
|
/// The root query triggering all analysis passes like typeck or borrowck.
|
||||||
query analysis(key: ()) -> Result<(), ErrorGuaranteed> {
|
query analysis(key: ()) {
|
||||||
eval_always
|
eval_always
|
||||||
desc { "running analysis passes on this crate" }
|
desc { "running analysis passes on this crate" }
|
||||||
}
|
}
|
||||||
|
|
|
@@ -51,6 +51,7 @@ pub mod source_map;
 use source_map::{SourceMap, SourceMapInputs};
 
 pub use self::caching_source_map_view::CachingSourceMapView;
+use crate::fatal_error::FatalError;
 
 pub mod edition;
 use edition::Edition;
@@ -2614,6 +2615,10 @@ impl ErrorGuaranteed {
     pub fn unchecked_error_guaranteed() -> Self {
         ErrorGuaranteed(())
     }
+
+    pub fn raise_fatal(self) -> ! {
+        FatalError.raise()
+    }
 }
 
 impl<E: rustc_serialize::Encoder> Encodable<E> for ErrorGuaranteed {
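The new `ErrorGuaranteed::raise_fatal` is the bridge between the two styles: code that ends up holding a guarantee token but no longer has a `Result` channel can convert it into the fatal-error path on the spot. The call-site pattern looks roughly like this; `Diag` and `Guar` are local stand-ins for the rustc_errors/rustc_span types:

```rust
struct Diag;
struct Guar(());

impl Diag {
    fn emit_err(&self, msg: &str) -> Guar {
        eprintln!("error: {msg}");
        Guar(())
    }
}

impl Guar {
    // Mirrors ErrorGuaranteed::raise_fatal(): the token proves an error was
    // already emitted, so escalating to a fatal abort loses no information.
    fn raise_fatal(self) -> ! {
        std::process::exit(1)
    }
}

fn check(input: &str, dcx: &Diag) -> u32 {
    match input.parse::<u32>() {
        Ok(n) => n,
        // Previously this arm would `return Err(guar)` and every caller
        // carried a Result; now the token is raised right here.
        Err(_) => dcx.emit_err("expected an integer").raise_fatal(),
    }
}
```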
@ -5,12 +5,12 @@ use std::{io, mem};
|
||||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
|
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
|
||||||
use rustc_data_structures::sync::Lrc;
|
use rustc_data_structures::sync::Lrc;
|
||||||
use rustc_data_structures::unord::UnordSet;
|
use rustc_data_structures::unord::UnordSet;
|
||||||
|
use rustc_errors::TerminalUrl;
|
||||||
use rustc_errors::codes::*;
|
use rustc_errors::codes::*;
|
||||||
use rustc_errors::emitter::{
|
use rustc_errors::emitter::{
|
||||||
DynEmitter, HumanEmitter, HumanReadableErrorType, OutputTheme, stderr_destination,
|
DynEmitter, HumanEmitter, HumanReadableErrorType, OutputTheme, stderr_destination,
|
||||||
};
|
};
|
||||||
use rustc_errors::json::JsonEmitter;
|
use rustc_errors::json::JsonEmitter;
|
||||||
use rustc_errors::{ErrorGuaranteed, TerminalUrl};
|
|
||||||
use rustc_feature::UnstableFeatures;
|
use rustc_feature::UnstableFeatures;
|
||||||
use rustc_hir::def::Res;
|
use rustc_hir::def::Res;
|
||||||
use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId};
|
use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId};
|
||||||
|
@ -326,7 +326,7 @@ pub(crate) fn run_global_ctxt(
|
||||||
show_coverage: bool,
|
show_coverage: bool,
|
||||||
render_options: RenderOptions,
|
render_options: RenderOptions,
|
||||||
output_format: OutputFormat,
|
output_format: OutputFormat,
|
||||||
) -> Result<(clean::Crate, RenderOptions, Cache), ErrorGuaranteed> {
|
) -> (clean::Crate, RenderOptions, Cache) {
|
||||||
// Certain queries assume that some checks were run elsewhere
|
// Certain queries assume that some checks were run elsewhere
|
||||||
// (see https://github.com/rust-lang/rust/pull/73566#issuecomment-656954425),
|
// (see https://github.com/rust-lang/rust/pull/73566#issuecomment-656954425),
|
||||||
// so type-check everything other than function bodies in this crate before running lints.
|
// so type-check everything other than function bodies in this crate before running lints.
|
||||||
|
@@ -340,9 +340,7 @@ pub(crate) fn run_global_ctxt(
         tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_type_wf(module))
     });
 
-    if let Some(guar) = tcx.dcx().has_errors() {
-        return Err(guar);
-    }
+    tcx.dcx().abort_if_errors();
 
     tcx.sess.time("missing_docs", || rustc_lint::check_crate(tcx));
     tcx.sess.time("check_mod_attrs", || {
@@ -446,11 +444,9 @@ pub(crate) fn run_global_ctxt(
         LinkCollector { cx: &mut ctxt, visited_links: visited, ambiguous_links: ambiguous };
     collector.resolve_ambiguities();
 
-    if let Some(guar) = tcx.dcx().has_errors() {
-        return Err(guar);
-    }
+    tcx.dcx().abort_if_errors();
 
-    Ok((krate, ctxt.render_options, ctxt.cache))
+    (krate, ctxt.render_options, ctxt.cache)
 }
 
 /// Due to <https://github.com/rust-lang/rust/pull/73566>,
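Both hunks above replace the same idiom, so it is worth spelling out: instead of asking the diagnostic context whether an error was emitted and threading the resulting token outward, the function lets the context abort on its own. Roughly, with tcx in scope as in run_global_ctxt:

    // Before: propagate the ErrorGuaranteed token through the return type.
    if let Some(guar) = tcx.dcx().has_errors() {
        return Err(guar);
    }

    // After: abort_if_errors() raises a fatal error itself if anything was
    // emitted, so the function can go on to return plain values.
    tcx.dcx().abort_if_errors();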
@@ -16,7 +16,7 @@ pub(crate) use markdown::test as test_markdown;
 use rustc_ast as ast;
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
 use rustc_errors::emitter::HumanReadableErrorType;
-use rustc_errors::{ColorConfig, DiagCtxtHandle, ErrorGuaranteed, FatalError};
+use rustc_errors::{ColorConfig, DiagCtxtHandle};
 use rustc_hir::CRATE_HIR_ID;
 use rustc_hir::def_id::LOCAL_CRATE;
 use rustc_interface::interface;
@@ -89,11 +89,7 @@ fn get_doctest_dir() -> io::Result<TempDir> {
     TempFileBuilder::new().prefix("rustdoctest").tempdir()
 }
 
-pub(crate) fn run(
-    dcx: DiagCtxtHandle<'_>,
-    input: Input,
-    options: RustdocOptions,
-) -> Result<(), ErrorGuaranteed> {
+pub(crate) fn run(dcx: DiagCtxtHandle<'_>, input: Input, options: RustdocOptions) {
     let invalid_codeblock_attributes_name = crate::lint::INVALID_CODEBLOCK_ATTRIBUTES.name;
 
     // See core::create_config for what's going on here.
@@ -167,7 +163,7 @@ pub(crate) fn run(
         Err(error) => return crate::wrap_return(dcx, Err(error)),
     };
     let args_path = temp_dir.path().join("rustdoc-cfgs");
-    crate::wrap_return(dcx, generate_args_file(&args_path, &options))?;
+    crate::wrap_return(dcx, generate_args_file(&args_path, &options));
 
     let CreateRunnableDocTests {
         standalone_tests,
|
@ -179,7 +175,7 @@ pub(crate) fn run(
|
||||||
..
|
..
|
||||||
} = interface::run_compiler(config, |compiler| {
|
} = interface::run_compiler(config, |compiler| {
|
||||||
compiler.enter(|queries| {
|
compiler.enter(|queries| {
|
||||||
let collector = queries.global_ctxt()?.enter(|tcx| {
|
let collector = queries.global_ctxt().enter(|tcx| {
|
||||||
let crate_name = tcx.crate_name(LOCAL_CRATE).to_string();
|
let crate_name = tcx.crate_name(LOCAL_CRATE).to_string();
|
||||||
let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID);
|
let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID);
|
||||||
let opts = scrape_test_config(crate_name, crate_attrs, args_path);
|
let opts = scrape_test_config(crate_name, crate_attrs, args_path);
|
||||||
|
@@ -196,13 +192,11 @@ pub(crate) fn run(
 
                 collector
             });
-            if compiler.sess.dcx().has_errors().is_some() {
-                FatalError.raise();
-            }
+            compiler.sess.dcx().abort_if_errors();
 
-            Ok(collector)
+            collector
         })
-    })?;
+    });
 
     run_tests(opts, &rustdoc_options, &unused_extern_reports, standalone_tests, mergeable_tests);
 
@@ -246,8 +240,6 @@ pub(crate) fn run(
             eprintln!("{unused_extern_json}");
         }
     }
-
-    Ok(())
 }
 
 pub(crate) fn run_tests(
@@ -76,7 +76,7 @@ use std::process;
 use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
 
-use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, FatalError};
+use rustc_errors::DiagCtxtHandle;
 use rustc_interface::interface;
 use rustc_middle::ty::TyCtxt;
 use rustc_session::config::{ErrorOutputType, RustcOptGroup, make_crate_type_option};
@@ -179,7 +179,8 @@ pub fn main() {
 
     let exit_code = rustc_driver::catch_with_exit_code(|| {
         let at_args = rustc_driver::args::raw_args(&early_dcx)?;
-        main_args(&mut early_dcx, &at_args, using_internal_features)
+        main_args(&mut early_dcx, &at_args, using_internal_features);
+        Ok(())
     });
     process::exit(exit_code);
 }
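Because main_args no longer returns a Result, the closure handed to catch_with_exit_code supplies the Ok(()) itself; any fatal error raised further down unwinds past it and becomes the process exit code. A stripped-down sketch of that control flow (do_work is illustrative, not a real function in this crate):

    let exit_code = rustc_driver::catch_with_exit_code(|| {
        do_work(); // may call dcx.fatal(..) or guar.raise_fatal() internally
        Ok(())     // only reached when no fatal error was raised
    });
    std::process::exit(exit_code);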
@@ -699,13 +700,10 @@ fn usage(argv0: &str) {
     );
 }
 
-/// A result type used by several functions under `main()`.
-type MainResult = Result<(), ErrorGuaranteed>;
-
-pub(crate) fn wrap_return(dcx: DiagCtxtHandle<'_>, res: Result<(), String>) -> MainResult {
+pub(crate) fn wrap_return(dcx: DiagCtxtHandle<'_>, res: Result<(), String>) {
     match res {
-        Ok(()) => dcx.has_errors().map_or(Ok(()), Err),
-        Err(err) => Err(dcx.err(err)),
+        Ok(()) => dcx.abort_if_errors(),
+        Err(err) => dcx.fatal(err),
     }
 }
 
@@ -714,17 +712,17 @@ fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
     renderopts: config::RenderOptions,
     cache: formats::cache::Cache,
     tcx: TyCtxt<'tcx>,
-) -> MainResult {
+) {
     match formats::run_format::<T>(krate, renderopts, cache, tcx) {
-        Ok(_) => tcx.dcx().has_errors().map_or(Ok(()), Err),
+        Ok(_) => tcx.dcx().abort_if_errors(),
         Err(e) => {
             let mut msg =
-                tcx.dcx().struct_err(format!("couldn't generate documentation: {}", e.error));
+                tcx.dcx().struct_fatal(format!("couldn't generate documentation: {}", e.error));
             let file = e.file.display().to_string();
             if !file.is_empty() {
                 msg.note(format!("failed to create or modify \"{file}\""));
             }
-            Err(msg.emit())
+            msg.emit();
         }
     }
 }
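With MainResult gone, run_renderer either returns normally or diverges: on success, abort_if_errors() only returns if nothing was emitted, and in the error arm the fatal-level diagnostic ends the session once it is emitted. Schematically (message text shortened, names as in the hunk above):

    let mut msg = tcx.dcx().struct_fatal("couldn't generate documentation: ...");
    msg.note("failed to create or modify \"<file>\"");
    msg.emit(); // prints the diagnostic and raises a fatal error; control does not fall through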
@@ -759,7 +757,7 @@ fn main_args(
     early_dcx: &mut EarlyDiagCtxt,
     at_args: &[String],
     using_internal_features: Arc<AtomicBool>,
-) -> MainResult {
+) {
     // Throw away the first argument, the name of the binary.
     // In case of at_args being empty, as might be the case by
     // passing empty argument array to execve under some platforms,
@@ -770,7 +768,7 @@ fn main_args(
     // the compiler with @empty_file as argv[0] and no more arguments.
     let at_args = at_args.get(1..).unwrap_or_default();
 
-    let args = rustc_driver::args::arg_expand_all(early_dcx, at_args)?;
+    let args = rustc_driver::args::arg_expand_all(early_dcx, at_args);
 
     let mut options = getopts::Options::new();
     for option in opts() {
@@ -788,7 +786,7 @@ fn main_args(
     let (input, options, render_options) =
         match config::Options::from_matches(early_dcx, &matches, args) {
             Some(opts) => opts,
-            None => return Ok(()),
+            None => return,
         };
 
     let dcx =
@@ -853,11 +851,11 @@ fn main_args(
 
     if sess.opts.describe_lints {
         rustc_driver::describe_lints(sess);
-        return Ok(());
+        return;
     }
 
     compiler.enter(|queries| {
-        let Ok(mut gcx) = queries.global_ctxt() else { FatalError.raise() };
+        let mut gcx = queries.global_ctxt();
         if sess.dcx().has_errors().is_some() {
             sess.dcx().fatal("Compilation failed, aborting rustdoc");
         }
@@ -865,7 +863,7 @@ fn main_args(
         gcx.enter(|tcx| {
             let (krate, render_opts, mut cache) = sess.time("run_global_ctxt", || {
                 core::run_global_ctxt(tcx, show_coverage, render_options, output_format)
-            })?;
+            });
             info!("finished with rustc");
 
             if let Some(options) = scrape_examples_options {
@@ -884,10 +882,10 @@ fn main_args(
             if show_coverage {
                 // if we ran coverage, bail early, we don't need to also generate docs at this point
                 // (also we didn't load in any of the useful passes)
-                return Ok(());
+                return;
             } else if run_check {
                 // Since we're in "check" mode, no need to generate anything beyond this point.
-                return Ok(());
+                return;
             }
 
             info!("going to format");
@@ -7,7 +7,6 @@ use rustc_data_structures::fx::FxIndexMap;
 use rustc_errors::DiagCtxtHandle;
 use rustc_hir::intravisit::{self, Visitor};
 use rustc_hir::{self as hir};
-use rustc_interface::interface;
 use rustc_macros::{Decodable, Encodable};
 use rustc_middle::hir::nested_filter;
 use rustc_middle::ty::{self, TyCtxt};
@@ -275,7 +274,7 @@ pub(crate) fn run(
     tcx: TyCtxt<'_>,
     options: ScrapeExamplesOptions,
     bin_crate: bool,
-) -> interface::Result<()> {
+) {
     let inner = move || -> Result<(), String> {
         // Generates source files for examples
         renderopts.no_emit_shared = true;
@@ -329,8 +328,6 @@ pub(crate) fn run(
     if let Err(e) = inner() {
         tcx.dcx().fatal(e);
     }
-
-    Ok(())
 }
 
 // Note: the DiagCtxt must be passed in explicitly because sess isn't available while parsing
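scrape_examples::run keeps collecting plain String errors from an inner closure; only the outer boundary changed, converting any failure into a fatal diagnostic instead of an interface::Result. The shape, assuming tcx is in scope as above:

    let inner = move || -> Result<(), String> {
        // ... work that reports failures as Strings ...
        Ok(())
    };
    if let Err(e) = inner() {
        tcx.dcx().fatal(e); // diverges, so no trailing Ok(()) is needed
    }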
@@ -236,7 +236,8 @@ pub fn main() {
         let mut args: Vec<String> = orig_args.clone();
         pass_sysroot_env_if_given(&mut args, sys_root_env);
 
-        return rustc_driver::RunCompiler::new(&args, &mut DefaultCallbacks).run();
+        rustc_driver::RunCompiler::new(&args, &mut DefaultCallbacks).run();
+        return Ok(());
     }
 
     if orig_args.iter().any(|a| a == "--version" || a == "-V") {
@@ -296,12 +297,13 @@ pub fn main() {
             args.extend(clippy_args);
             rustc_driver::RunCompiler::new(&args, &mut ClippyCallbacks { clippy_args_var })
                 .set_using_internal_features(using_internal_features)
-                .run()
+                .run();
         } else {
             rustc_driver::RunCompiler::new(&args, &mut RustcCallbacks { clippy_args_var })
                 .set_using_internal_features(using_internal_features)
-                .run()
+                .run();
         }
+        return Ok(());
     }))
 }
 
@@ -289,7 +289,8 @@ fn run_compiler(
     let exit_code = rustc_driver::catch_with_exit_code(move || {
         rustc_driver::RunCompiler::new(&args, callbacks)
             .set_using_internal_features(using_internal_features)
-            .run()
+            .run();
+        Ok(())
     });
     std::process::exit(exit_code)
 }
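The same adjustment applies to out-of-tree drivers: RunCompiler::run() now returns () and signals failure by raising a fatal error, so wrappers keep working as long as the call stays inside catch_with_exit_code. A minimal standalone driver after this change might look like the following sketch (the #![feature(rustc_private)] and extern crate boilerplate is omitted; NopCallbacks is an illustrative no-op implementation, not something from this commit):

    struct NopCallbacks;
    impl rustc_driver::Callbacks for NopCallbacks {}

    fn main() {
        let args: Vec<String> = std::env::args().collect();
        let exit_code = rustc_driver::catch_with_exit_code(|| {
            // run() either succeeds or raises a fatal error caught by the wrapper.
            rustc_driver::RunCompiler::new(&args, &mut NopCallbacks).run();
            Ok(())
        });
        std::process::exit(exit_code);
    }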
@@ -12,7 +12,7 @@ extern crate rustc_interface;
 use rustc_interface::interface;
 
 struct TestCalls<'a> {
-    count: &'a mut u32
+    count: &'a mut u32,
 }
 
 impl rustc_driver::Callbacks for TestCalls<'_> {
@@ -24,8 +24,9 @@ impl rustc_driver::Callbacks for TestCalls<'_> {
 fn main() {
     let mut count = 1;
     let args = vec!["compiler-calls".to_string(), "foo.rs".to_string()];
-    rustc_driver::catch_fatal_errors(|| {
-        rustc_driver::RunCompiler::new(&args, &mut TestCalls { count: &mut count }).run().ok();
+    rustc_driver::catch_fatal_errors(|| -> interface::Result<()> {
+        rustc_driver::RunCompiler::new(&args, &mut TestCalls { count: &mut count }).run();
+        Ok(())
     })
     .ok();
     assert_eq!(count, 2);
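In this fulldeps test, catch_fatal_errors is what converts a raised FatalError back into a Result, which is why the test can still assert on count after a failing run; since run() no longer produces the Result, the closure annotates its return type and supplies Ok(()). Roughly, with the test's `use rustc_interface::interface;` import in scope:

    let result = rustc_driver::catch_fatal_errors(|| -> interface::Result<()> {
        // run() aborts by raising FatalError if compilation fails.
        rustc_driver::RunCompiler::new(&args, &mut TestCalls { count: &mut count }).run();
        Ok(())
    });
    // `result` is Err(..) when a fatal error was raised inside the closure.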