Remove all threading through of ErrorGuaranteed from the driver
It was inconsistently done (sometimes even within a single function) and most of the rest of the compiler uses fatal errors instead, which need to be caught using catch_with_exit_code anyway. Using fatal errors instead of ErrorGuaranteed everywhere in the driver simplifies things a bit.
parent 030545d8c3
commit 401dd840ff
19 changed files with 175 additions and 216 deletions
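Illustrative sketch (not part of the commit): the definitions below are simplified stand-ins for rustc_errors::ErrorGuaranteed, rustc_errors::FatalError and rustc_driver::catch_with_exit_code, whose real constructors and signatures differ, and expand_args_old / expand_args_new are hypothetical helpers. The sketch only shows the shape of the change: instead of threading Result<_, ErrorGuaranteed> through every driver helper, a helper reports its error and raises a fatal error, which unwinds to the top of main and is turned into an exit code there.

// Stand-in types for the sketch; the real rustc items cannot be built like this.
struct ErrorGuaranteed; // proof that a diagnostic was already emitted
struct FatalError;

impl FatalError {
    // The real driver unwinds with a dedicated payload; a plain panic stands in here.
    fn raise(self) -> ! {
        panic!("fatal error raised")
    }
}

// Roughly what catching the fatal-error unwind and mapping it to an exit code looks like.
fn catch_with_exit_code(f: impl FnOnce()) -> i32 {
    match std::panic::catch_unwind(std::panic::AssertUnwindSafe(f)) {
        Ok(()) => 0,
        Err(_) => 1,
    }
}

// Old style: every helper threads ErrorGuaranteed back to its caller.
fn expand_args_old(ok: bool) -> Result<Vec<String>, ErrorGuaranteed> {
    if ok { Ok(vec!["main.rs".to_owned()]) } else { Err(ErrorGuaranteed) }
}

// New style: the helper returns a plain value; errors escape via the unwind.
fn expand_args_new(ok: bool) -> Vec<String> {
    if ok { vec!["main.rs".to_owned()] } else { FatalError.raise() }
}

fn main() {
    let code = catch_with_exit_code(|| {
        let _ = expand_args_old(true); // Result has to be propagated by hand
        let _args = expand_args_new(true); // nothing to propagate
    });
    std::process::exit(code);
}

In the real driver the unwind carries a dedicated panic payload and catch_with_exit_code installs the matching handling, so the panic-based version above only approximates the control flow.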
@@ -99,10 +99,7 @@ impl Expander {
 /// If this function is intended to be used with command line arguments,
 /// `argv[0]` must be removed prior to calling it manually.
 #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
-pub fn arg_expand_all(
-    early_dcx: &EarlyDiagCtxt,
-    at_args: &[String],
-) -> Result<Vec<String>, ErrorGuaranteed> {
+pub fn arg_expand_all(early_dcx: &EarlyDiagCtxt, at_args: &[String]) -> Vec<String> {
     let mut expander = Expander::default();
     let mut result = Ok(());
     for arg in at_args {
@@ -110,7 +107,10 @@ pub fn arg_expand_all(
             result = Err(early_dcx.early_err(format!("failed to load argument file: {err}")));
         }
     }
-    result.map(|()| expander.finish())
+    if let Err(guar) = result {
+        guar.raise_fatal();
+    }
+    expander.finish()
 }
 
 /// Gets the raw unprocessed command-line arguments as Unicode strings, without doing any further
@@ -42,9 +42,7 @@ use rustc_data_structures::profiling::{
 };
 use rustc_errors::emitter::stderr_destination;
 use rustc_errors::registry::Registry;
-use rustc_errors::{
-    ColorConfig, DiagCtxt, ErrCode, ErrorGuaranteed, FatalError, PResult, markdown,
-};
+use rustc_errors::{ColorConfig, DiagCtxt, ErrCode, FatalError, PResult, markdown};
 use rustc_feature::find_gated_cfg;
 use rustc_interface::util::{self, get_codegen_backend};
 use rustc_interface::{Linker, Queries, interface, passes};
@@ -271,14 +269,14 @@ impl<'a> RunCompiler<'a> {
     }
 
     /// Parse args and run the compiler.
-    pub fn run(self) -> interface::Result<()> {
+    pub fn run(self) {
         run_compiler(
             self.at_args,
             self.callbacks,
             self.file_loader,
             self.make_codegen_backend,
             self.using_internal_features,
-        )
+        );
     }
 }
 
@@ -290,7 +288,7 @@ fn run_compiler(
         Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
     >,
     using_internal_features: Arc<std::sync::atomic::AtomicBool>,
-) -> interface::Result<()> {
+) {
     let mut default_early_dcx = EarlyDiagCtxt::new(ErrorOutputType::default());
 
     // Throw away the first argument, the name of the binary.
@@ -303,9 +301,11 @@ fn run_compiler(
     // the compiler with @empty_file as argv[0] and no more arguments.
     let at_args = at_args.get(1..).unwrap_or_default();
 
-    let args = args::arg_expand_all(&default_early_dcx, at_args)?;
+    let args = args::arg_expand_all(&default_early_dcx, at_args);
 
-    let Some(matches) = handle_options(&default_early_dcx, &args) else { return Ok(()) };
+    let Some(matches) = handle_options(&default_early_dcx, &args) else {
+        return;
+    };
 
     let sopts = config::build_session_options(&mut default_early_dcx, &matches);
     // fully initialize ice path static once unstable options are available as context
@@ -313,7 +313,7 @@ fn run_compiler(
 
     if let Some(ref code) = matches.opt_str("explain") {
         handle_explain(&default_early_dcx, diagnostics_registry(), code, sopts.color);
-        return Ok(());
+        return;
     }
 
     let (odir, ofile) = make_output(&matches);
@@ -338,7 +338,7 @@ fn run_compiler(
         expanded_args: args,
     };
 
-    let has_input = match make_input(&default_early_dcx, &matches.free)? {
+    let has_input = match make_input(&default_early_dcx, &matches.free) {
         Some(input) => {
             config.input = input;
             true // has input: normal compilation
@@ -358,7 +358,7 @@ fn run_compiler(
         // printing some information without compiling, or exiting immediately
         // after parsing, etc.
         let early_exit = || {
-            if let Some(guar) = sess.dcx().has_errors() { Err(guar) } else { Ok(()) }
+            sess.dcx().abort_if_errors();
         };
 
         // This implements `-Whelp`. It should be handled very early, like
@@ -389,22 +389,25 @@ fn run_compiler(
         }
 
         let linker = compiler.enter(|queries| {
-            let early_exit = || early_exit().map(|_| None);
+            let early_exit = || {
+                sess.dcx().abort_if_errors();
+                None
+            };
 
             // Parse the crate root source code (doesn't parse submodules yet)
             // Everything else is parsed during macro expansion.
-            queries.parse()?;
+            queries.parse();
 
             // If pretty printing is requested: Figure out the representation, print it and exit
             if let Some(pp_mode) = sess.opts.pretty {
                 if pp_mode.needs_ast_map() {
-                    queries.global_ctxt()?.enter(|tcx| {
+                    queries.global_ctxt().enter(|tcx| {
                         tcx.ensure().early_lint_checks(());
                         pretty::print(sess, pp_mode, pretty::PrintExtra::NeedsAstMap { tcx });
                         passes::write_dep_info(tcx);
                     });
                 } else {
-                    let krate = queries.parse()?;
+                    let krate = queries.parse();
                     pretty::print(sess, pp_mode, pretty::PrintExtra::AfterParsing {
                         krate: &*krate.borrow(),
                     });
@@ -423,17 +426,17 @@ fn run_compiler(
             }
 
             // Make sure name resolution and macro expansion is run.
-            queries.global_ctxt()?.enter(|tcx| tcx.resolver_for_lowering());
+            queries.global_ctxt().enter(|tcx| tcx.resolver_for_lowering());
 
             if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir {
-                queries.global_ctxt()?.enter(|tcxt| dump_feature_usage_metrics(tcxt, metrics_dir));
+                queries.global_ctxt().enter(|tcxt| dump_feature_usage_metrics(tcxt, metrics_dir));
             }
 
             if callbacks.after_expansion(compiler, queries) == Compilation::Stop {
                 return early_exit();
             }
 
-            queries.global_ctxt()?.enter(|tcx| {
+            queries.global_ctxt().enter(|tcx| {
                 passes::write_dep_info(tcx);
 
                 if sess.opts.output_types.contains_key(&OutputType::DepInfo)
@@ -446,23 +449,21 @@ fn run_compiler(
                     return early_exit();
                 }
 
-                tcx.analysis(())?;
+                let _ = tcx.analysis(());
 
                 if callbacks.after_analysis(compiler, tcx) == Compilation::Stop {
                     return early_exit();
                 }
 
-                Ok(Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend)?))
+                Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend))
             })
-        })?;
+        });
 
         // Linking is done outside the `compiler.enter()` so that the
         // `GlobalCtxt` within `Queries` can be freed as early as possible.
         if let Some(linker) = linker {
-            linker.link(sess, codegen_backend)?;
+            linker.link(sess, codegen_backend);
         }
-
-        Ok(())
     })
 }
 
@@ -495,21 +496,17 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<OutFileNa
 
 /// Extract input (string or file and optional path) from matches.
 /// This handles reading from stdin if `-` is provided.
-fn make_input(
-    early_dcx: &EarlyDiagCtxt,
-    free_matches: &[String],
-) -> Result<Option<Input>, ErrorGuaranteed> {
+fn make_input(early_dcx: &EarlyDiagCtxt, free_matches: &[String]) -> Option<Input> {
     match free_matches {
-        [] => Ok(None), // no input: we will exit early,
+        [] => None, // no input: we will exit early,
         [ifile] if ifile == "-" => {
             // read from stdin as `Input::Str`
             let mut input = String::new();
             if io::stdin().read_to_string(&mut input).is_err() {
                 // Immediately stop compilation if there was an issue reading
                 // the input (for example if the input stream is not UTF-8).
-                let reported = early_dcx
-                    .early_err("couldn't read from stdin, as it did not contain valid UTF-8");
-                return Err(reported);
+                early_dcx
+                    .early_fatal("couldn't read from stdin, as it did not contain valid UTF-8");
             }
 
             let name = match env::var("UNSTABLE_RUSTDOC_TEST_PATH") {
@@ -525,9 +522,9 @@ fn make_input(
                 Err(_) => FileName::anon_source_code(&input),
             };
 
-            Ok(Some(Input::Str { name, input }))
+            Some(Input::Str { name, input })
         }
-        [ifile] => Ok(Some(Input::File(PathBuf::from(ifile)))),
+        [ifile] => Some(Input::File(PathBuf::from(ifile))),
         [ifile1, ifile2, ..] => early_dcx.early_fatal(format!(
             "multiple input filenames provided (first two filenames are `{}` and `{}`)",
             ifile1, ifile2
@@ -662,9 +659,7 @@ fn process_rlink(sess: &Session, compiler: &interface::Compiler) {
                 };
             }
         };
-        if compiler.codegen_backend.link(sess, codegen_results, &outputs).is_err() {
-            FatalError.raise();
-        }
+        compiler.codegen_backend.link(sess, codegen_results, &outputs);
     } else {
         dcx.emit_fatal(RlinkNotAFile {});
     }
@@ -1607,7 +1602,8 @@ pub fn main() -> ! {
     let exit_code = catch_with_exit_code(|| {
         RunCompiler::new(&args::raw_args(&early_dcx)?, &mut callbacks)
             .set_using_internal_features(using_internal_features)
-            .run()
+            .run();
+        Ok(())
     });
 
     if let Some(format) = callbacks.time_passes {
@@ -222,8 +222,8 @@ impl<'tcx> PrintExtra<'tcx> {
 }
 
 pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) {
-    if ppm.needs_analysis() && ex.tcx().analysis(()).is_err() {
-        FatalError.raise();
+    if ppm.needs_analysis() {
+        let _ = ex.tcx().analysis(());
     }
 
     let (src, src_name) = get_source(sess);