
`MacArgs` is an enum with three variants: `Empty`, `Delimited`, and `Eq`. It is used in two ways:

- for representing attribute macro arguments (e.g. in `AttrItem`), where all three variants are used;
- for representing function-like macro arguments (e.g. in `MacCall` and `MacroDef`), where only the `Delimited` variant is used.

In other words, `MacArgs` is used in two quite different places whose needs only partially overlap. I find this makes the code hard to read. It also leads to various unreachable code paths and allows invalid values (such as accidentally using `MacArgs::Empty` in a `MacCall`).

This commit splits `MacArgs` in two:

- `DelimArgs` is a new struct covering just the "delimited arguments" case. It is now used in `MacCall` and `MacroDef`.
- `AttrArgs` is a renaming of the old `MacArgs` enum for the attribute macro case. Its `Delimited` variant now contains a `DelimArgs`.

Various other related things are renamed as well. These changes make the code clearer, remove several unreachable code paths, and make the invalid values unrepresentable.
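To make the shape of the split concrete, here is a minimal sketch of the before/after types. It is not the exact `rustc_ast` definitions: the placeholder types at the top stand in for the real span, delimiter, token-stream, and expression types, and the `Eq` payloads are abbreviated. The `DelimArgs` field names (`dspan`, `delim`, `tokens`) are the ones destructured in `parse_cfg_attr` in the file below.

```rust
// Minimal, self-contained sketch of the restructuring; placeholder types
// stand in for the real `rustc_ast`/`rustc_span` ones.
struct DelimSpan;
struct TokenStream;
struct Span;
struct Expr;
enum MacDelimiter {
    Parenthesis,
    Bracket,
    Brace,
}

// Before: a single enum served both attributes and function-like macros,
// even though `Empty` and `Eq` never make sense for the latter.
enum MacArgs {
    Empty,
    Delimited(DelimSpan, MacDelimiter, TokenStream),
    Eq(Span, Expr), // payload abbreviated
}

// After: the "delimited arguments" case gets its own struct, which is the
// only form `MacCall` and `MacroDef` can hold...
struct DelimArgs {
    dspan: DelimSpan,
    delim: MacDelimiter,
    tokens: TokenStream,
}

// ...while attributes keep a three-way enum whose `Delimited` variant now
// wraps a `DelimArgs`. An `Empty` or `Eq` value in a `MacCall` is no longer
// representable by construction.
enum AttrArgs {
    Empty,
    Delimited(DelimArgs),
    Eq(Span, Expr), // payload abbreviated
}
```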
//! The main parser interface.

#![feature(array_windows)]
#![feature(box_patterns)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]
#![feature(let_chains)]
#![feature(never_type)]
#![feature(rustc_attrs)]
#![recursion_limit = "256"]

#[macro_use]
extern crate tracing;

use rustc_ast as ast;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{AttrItem, Attribute, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, Diagnostic, FatalError, Level, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};

use std::path::Path;

pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");

#[macro_use]
pub mod parser;
use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
pub mod lexer;
pub mod validate_attr;

mod errors;

// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// `source_str`.

/// A variant of 'panictry!' that works on a `Vec<Diagnostic>` instead of a single
/// `DiagnosticBuilder`.
macro_rules! panictry_buffer {
    ($handler:expr, $e:expr) => {{
        use rustc_errors::FatalError;
        use std::result::Result::{Err, Ok};
        match $e {
            Ok(e) => e,
            Err(errs) => {
                for mut e in errs {
                    $handler.emit_diagnostic(&mut e);
                }
                FatalError.raise()
            }
        }
    }};
}

pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> {
    let mut parser = new_parser_from_file(sess, input, None);
    parser.parse_crate_mod()
}

pub fn parse_crate_attrs_from_file<'a>(
    input: &Path,
    sess: &'a ParseSess,
) -> PResult<'a, ast::AttrVec> {
    let mut parser = new_parser_from_file(sess, input, None);
    parser.parse_inner_attributes()
}

pub fn parse_crate_from_source_str(
    name: FileName,
    source: String,
    sess: &ParseSess,
) -> PResult<'_, ast::Crate> {
    new_parser_from_source_str(sess, name, source).parse_crate_mod()
}

pub fn parse_crate_attrs_from_source_str(
    name: FileName,
    source: String,
    sess: &ParseSess,
) -> PResult<'_, ast::AttrVec> {
    new_parser_from_source_str(sess, name, source).parse_inner_attributes()
}

pub fn parse_stream_from_source_str(
    name: FileName,
    source: String,
    sess: &ParseSess,
    override_span: Option<Span>,
) -> TokenStream {
    let (stream, mut errors) =
        source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
    emit_unclosed_delims(&mut errors, &sess);
    stream
}

/// Creates a new parser from a source string.
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
}

/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
/// token stream.
pub fn maybe_new_parser_from_source_str(
    sess: &ParseSess,
    name: FileName,
    source: String,
) -> Result<Parser<'_>, Vec<Diagnostic>> {
    maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))
}

/// Creates a new parser, handling errors as appropriate if the file doesn't exist.
/// If a span is given, that is used on an error as the source of the problem.
pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
    source_file_to_parser(sess, file_to_source_file(sess, path, sp))
}

/// Given a session and a `source_file`, returns a parser.
fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
    panictry_buffer!(&sess.span_diagnostic, maybe_source_file_to_parser(sess, source_file))
}

/// Given a session and a `source_file`, returns a parser. Returns any buffered errors from lexing
/// the initial token stream.
fn maybe_source_file_to_parser(
    sess: &ParseSess,
    source_file: Lrc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diagnostic>> {
    let end_pos = source_file.end_pos;
    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
    let mut parser = stream_to_parser(sess, stream, None);
    parser.unclosed_delims = unclosed_delims;
    if parser.token == token::Eof {
        parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
    }

    Ok(parser)
}

// Base abstractions

/// Given a session and a path and an optional span (for error reporting),
/// add the path to the session's source_map and return the new source_file or
/// error when a file can't be read.
fn try_file_to_source_file(
    sess: &ParseSess,
    path: &Path,
    spanopt: Option<Span>,
) -> Result<Lrc<SourceFile>, Diagnostic> {
    sess.source_map().load_file(path).map_err(|e| {
        let msg = format!("couldn't read {}: {}", path.display(), e);
        let mut diag = Diagnostic::new(Level::Fatal, &msg);
        if let Some(sp) = spanopt {
            diag.set_span(sp);
        }
        diag
    })
}

/// Given a session and a path and an optional span (for error reporting),
/// adds the path to the session's `source_map` and returns the new `source_file`.
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> {
    match try_file_to_source_file(sess, path, spanopt) {
        Ok(source_file) => source_file,
        Err(mut d) => {
            sess.span_diagnostic.emit_diagnostic(&mut d);
            FatalError.raise();
        }
    }
}

/// Given a `source_file`, produces a sequence of token trees.
pub fn source_file_to_stream(
    sess: &ParseSess,
    source_file: Lrc<SourceFile>,
    override_span: Option<Span>,
) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
}

/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
/// parsing the token stream.
pub fn maybe_file_to_stream(
    sess: &ParseSess,
    source_file: Lrc<SourceFile>,
    override_span: Option<Span>,
) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
    let src = source_file.src.as_ref().unwrap_or_else(|| {
        sess.span_diagnostic.bug(&format!(
            "cannot lex `source_file` without source: {}",
            sess.source_map().filename_for_diagnostics(&source_file.name)
        ));
    });

    let (token_trees, unmatched_braces) =
        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);

    match token_trees {
        Ok(stream) => Ok((stream, unmatched_braces)),
        Err(err) => {
            let mut buffer = Vec::with_capacity(1);
            err.buffer(&mut buffer);
            // Not using `emit_unclosed_delims` to use `db.buffer`
            for unmatched in unmatched_braces {
                if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
                    err.buffer(&mut buffer);
                }
            }
            Err(buffer)
        }
    }
}

/// Given a stream and the `ParseSess`, produces a parser.
pub fn stream_to_parser<'a>(
    sess: &'a ParseSess,
    stream: TokenStream,
    subparser_name: Option<&'static str>,
) -> Parser<'a> {
    Parser::new(sess, stream, false, subparser_name)
}

/// Runs the given subparser `f` on the tokens of the given `attr`'s item.
pub fn parse_in<'a, T>(
    sess: &'a ParseSess,
    tts: TokenStream,
    name: &'static str,
    mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, T> {
    let mut parser = Parser::new(sess, tts, false, Some(name));
    let result = f(&mut parser)?;
    if parser.token != token::Eof {
        parser.unexpected()?;
    }
    Ok(result)
}

pub fn fake_token_stream_for_item(sess: &ParseSess, item: &ast::Item) -> TokenStream {
    let source = pprust::item_to_string(item);
    let filename = FileName::macro_expansion_source_code(&source);
    parse_stream_from_source_str(filename, source, sess, Some(item.span))
}

pub fn fake_token_stream_for_crate(sess: &ParseSess, krate: &ast::Crate) -> TokenStream {
    let source = pprust::crate_to_string_for_macros(krate);
    let filename = FileName::macro_expansion_source_code(&source);
    parse_stream_from_source_str(filename, source, sess, Some(krate.spans.inner_span))
}

pub fn parse_cfg_attr(
    attr: &Attribute,
    parse_sess: &ParseSess,
) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> {
    match attr.get_normal_item().args {
        ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens })
            if !tokens.is_empty() =>
        {
            let msg = "wrong `cfg_attr` delimiters";
            crate::validate_attr::check_meta_bad_delim(parse_sess, dspan, delim, msg);
            match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
                Ok(r) => return Some(r),
                Err(mut e) => {
                    e.help(&format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP))
                        .note(CFG_ATTR_NOTE_REF)
                        .emit();
                }
            }
        }
        _ => error_malformed_cfg_attr_missing(attr.span, parse_sess),
    }
    None
}

const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]";
const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
    <https://doc.rust-lang.org/reference/conditional-compilation.html\
    #the-cfg_attr-attribute>";

fn error_malformed_cfg_attr_missing(span: Span, parse_sess: &ParseSess) {
    parse_sess
        .span_diagnostic
        .struct_span_err(span, "malformed `cfg_attr` attribute input")
        .span_suggestion(
            span,
            "missing condition and attribute",
            CFG_ATTR_GRAMMAR_HELP,
            Applicability::HasPlaceholders,
        )
        .note(CFG_ATTR_NOTE_REF)
        .emit();
}