Make top-level rustc_parse functions fallible.

Currently we have an awkward mix of fallible and infallible functions:
```
       new_parser_from_source_str
 maybe_new_parser_from_source_str
       new_parser_from_file
(maybe_new_parser_from_file)        // missing
      (new_parser_from_source_file) // missing
 maybe_new_parser_from_source_file
       source_str_to_stream
 maybe_source_file_to_stream
```
We could add the two missing functions, but instead this commit removes
all the infallible ones and renames the fallible ones, leaving us with
the following, all of which are fallible:
```
new_parser_from_source_str
new_parser_from_file
new_parser_from_source_file
source_str_to_stream
source_file_to_stream
```
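For reference, here is roughly what the change looks like at the signature
level (signatures only, taken from the diff below): the `maybe_` prefix goes
away and the single remaining entry point returns a `Result` carrying any
buffered lexer diagnostics.
```
// Before: an infallible wrapper plus a separate `maybe_` variant.
pub fn new_parser_from_source_str(psess: &ParseSess, name: FileName, source: String) -> Parser<'_>;
pub fn maybe_new_parser_from_source_str(
    psess: &ParseSess,
    name: FileName,
    source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>>;

// After: a single fallible entry point; callers that want the old
// abort-on-error behaviour wrap it in `unwrap_or_emit_fatal`.
pub fn new_parser_from_source_str(
    psess: &ParseSess,
    name: FileName,
    source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>>;
```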
This requires making `unwrap_or_emit_fatal` public so that callers of the
formerly infallible functions can keep the old abort-on-error behaviour.
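Concretely, a call site that used to rely on an infallible wrapper now spells
out the abort step itself, as the updated test helpers in the diff below do.
A minimal sketch:
```
// Before: aborted internally on lexer errors.
let mut parser = new_parser_from_source_str(psess, name, source);

// After: same abort-on-error behaviour, but explicit at the call site.
let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source));
```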

This does make some of the call sites slightly more verbose, but I think
it's worth it for the simpler API. Also, there are two `catch_unwind`
calls and one `catch_fatal_errors` call in this diff that become
removable thanks to this change. (I will do that in a follow-up PR.)
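For callers that want to recover rather than abort, the new doc comments below
spell out the contract: each returned `Diag` must be consumed (emitted or
cancelled), otherwise it panics when dropped. A minimal sketch of that path,
for a hypothetical caller that simply discards the errors:
```
match new_parser_from_source_str(psess, name, source) {
    Ok(parser) => {
        // use `parser` as before
    }
    Err(errs) => {
        for err in errs {
            // consume each buffered diagnostic; `err.emit()` would report it instead
            err.cancel();
        }
    }
}
```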
Nicholas Nethercote 2024-05-31 15:43:18 +10:00
parent 264dbe4d81
commit b9037339cb
16 changed files with 105 additions and 75 deletions

@@ -35,7 +35,7 @@ mod errors;
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
// Unwrap the result if `Ok`, otherwise emit the diagnostics and abort.
fn unwrap_or_emit_fatal<T>(expr: Result<T, Vec<Diag<'_>>>) -> T {
pub fn unwrap_or_emit_fatal<T>(expr: Result<T, Vec<Diag<'_>>>) -> T {
match expr {
Ok(expr) => expr,
Err(errs) => {
@@ -47,25 +47,28 @@ fn unwrap_or_emit_fatal<T>(expr: Result<T, Vec<Diag<'_>>>) -> T {
}
}
/// Creates a new parser from a source string.
pub fn new_parser_from_source_str(psess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
unwrap_or_emit_fatal(maybe_new_parser_from_source_str(psess, name, source))
}
/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
/// token stream; these must be consumed via `emit`, `cancel`, etc., otherwise a panic will occur
/// when they are dropped.
pub fn maybe_new_parser_from_source_str(
/// Creates a new parser from a source string. On failure, the errors must be consumed via
/// `unwrap_or_emit_fatal`, `emit`, `cancel`, etc., otherwise a panic will occur when they are
/// dropped.
pub fn new_parser_from_source_str(
psess: &ParseSess,
name: FileName,
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
maybe_new_parser_from_source_file(psess, psess.source_map().new_source_file(name, source))
let source_file = psess.source_map().new_source_file(name, source);
new_parser_from_source_file(psess, source_file)
}
/// Creates a new parser, aborting if the file doesn't exist. If a span is given, that is used on
/// an error as the source of the problem.
pub fn new_parser_from_file<'a>(psess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
/// Creates a new parser from a filename. On failure, the errors must be consumed via
/// `unwrap_or_emit_fatal`, `emit`, `cancel`, etc., otherwise a panic will occur when they are
/// dropped.
///
/// If a span is given, that is used on an error as the source of the problem.
pub fn new_parser_from_file<'a>(
psess: &'a ParseSess,
path: &Path,
sp: Option<Span>,
) -> Result<Parser<'a>, Vec<Diag<'a>>> {
let source_file = psess.source_map().load_file(path).unwrap_or_else(|e| {
let msg = format!("couldn't read {}: {}", path.display(), e);
let mut err = psess.dcx.struct_fatal(msg);
@@ -74,23 +77,21 @@ pub fn new_parser_from_file<'a>(psess: &'a ParseSess, path: &Path, sp: Option<Sp
}
err.emit();
});
unwrap_or_emit_fatal(maybe_new_parser_from_source_file(psess, source_file))
new_parser_from_source_file(psess, source_file)
}
/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing
/// the initial token stream.
fn maybe_new_parser_from_source_file(
fn new_parser_from_source_file(
psess: &ParseSess,
source_file: Lrc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let end_pos = source_file.end_position();
let stream = maybe_source_file_to_stream(psess, source_file, None)?;
let stream = source_file_to_stream(psess, source_file, None)?;
let mut parser = Parser::new(psess, stream, None);
if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
}
Ok(parser)
}
@@ -99,14 +100,14 @@ pub fn source_str_to_stream(
name: FileName,
source: String,
override_span: Option<Span>,
) -> TokenStream {
) -> Result<TokenStream, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
unwrap_or_emit_fatal(maybe_source_file_to_stream(psess, source_file, override_span))
source_file_to_stream(psess, source_file, override_span)
}
/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
/// parsing the token stream.
fn maybe_source_file_to_stream<'psess>(
fn source_file_to_stream<'psess>(
psess: &'psess ParseSess,
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
@@ -139,13 +140,18 @@ pub fn parse_in<'a, T>(
pub fn fake_token_stream_for_item(psess: &ParseSess, item: &ast::Item) -> TokenStream {
let source = pprust::item_to_string(item);
let filename = FileName::macro_expansion_source_code(&source);
source_str_to_stream(psess, filename, source, Some(item.span))
unwrap_or_emit_fatal(source_str_to_stream(psess, filename, source, Some(item.span)))
}
pub fn fake_token_stream_for_crate(psess: &ParseSess, krate: &ast::Crate) -> TokenStream {
let source = pprust::crate_to_string_for_macros(krate);
let filename = FileName::macro_expansion_source_code(&source);
source_str_to_stream(psess, filename, source, Some(krate.spans.inner_span))
unwrap_or_emit_fatal(source_str_to_stream(
psess,
filename,
source,
Some(krate.spans.inner_span),
))
}
pub fn parse_cfg_attr(

@@ -1,5 +1,7 @@
use crate::parser::ForceCollect;
use crate::{new_parser_from_source_str, parser::Parser, source_str_to_stream};
use crate::{
new_parser_from_source_str, parser::Parser, source_str_to_stream, unwrap_or_emit_fatal,
};
use ast::token::IdentIsRaw;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token};
@@ -29,7 +31,11 @@ fn psess() -> ParseSess {
/// Map string to parser (via tts).
fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> {
new_parser_from_source_str(psess, PathBuf::from("bogofile").into(), source_str)
unwrap_or_emit_fatal(new_parser_from_source_str(
psess,
PathBuf::from("bogofile").into(),
source_str,
))
}
fn create_test_handler() -> (DiagCtxt, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
@@ -82,7 +88,12 @@ where
/// Maps a string to tts, using a made-up filename.
pub(crate) fn string_to_stream(source_str: String) -> TokenStream {
let psess = psess();
source_str_to_stream(&psess, PathBuf::from("bogofile").into(), source_str, None)
unwrap_or_emit_fatal(source_str_to_stream(
&psess,
PathBuf::from("bogofile").into(),
source_str,
None,
))
}
/// Parses a string, returns a crate.
@@ -1064,7 +1075,8 @@ fn parse_item_from_source_str(
source: String,
psess: &ParseSess,
) -> PResult<'_, Option<P<ast::Item>>> {
new_parser_from_source_str(psess, name, source).parse_item(ForceCollect::No)
unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source))
.parse_item(ForceCollect::No)
}
// Produces a `rustc_span::span`.
@@ -1345,7 +1357,7 @@ fn ttdelim_span() {
source: String,
psess: &ParseSess,
) -> PResult<'_, P<ast::Expr>> {
new_parser_from_source_str(psess, name, source).parse_expr()
unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source)).parse_expr()
}
create_default_session_globals_then(|| {