Rename all `ParseSess` variables/fields/lifetimes as `psess`.
Existing names for values of this type are `sess`, `parse_sess`, `parse_session`, and `ps`. `sess` is particularly annoying because that's also used for `Session` values, which are often co-located, and it can be difficult to know which type a value named `sess` refers to. (That annoyance is the main motivation for this change.) `psess` is nice and short, which is good for a name used this much. The commit also renames some `parse_sess_created` values as `psess_created`.
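To illustrate the ambiguity the message describes, here is a minimal sketch (the stand-in types and function names below are illustrative, not code from this diff): before the rename, a `ParseSess` parameter was often called `sess`, the same name conventionally given to `Session` values, so a reader had to check the signature to know which type was meant; with `psess`, the type is clear at every use site.

// Sketch only: `Session` and `ParseSess` stand in for the real rustc types.
struct Session;
struct ParseSess;

// Before: the `ParseSess` parameter is named `sess`, the same name
// conventionally used for `Session` values elsewhere, so a bare `sess`
// is ambiguous without looking at the type.
fn lex_before(sess: &ParseSess, _session: &Session) {
    let _ = sess;
}

// After: `psess` is reserved for `ParseSess` values, so a bare `sess`
// can only mean `Session`.
fn lex_after(psess: &ParseSess, sess: &Session) {
    let _ = (psess, sess);
}

fn main() {
    lex_before(&ParseSess, &Session);
    lex_after(&ParseSess, &Session);
}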
parent 4260f7ec67, commit 80d2bdb619
98 changed files with 653 additions and 687 deletions
@@ -42,12 +42,12 @@ pub struct UnmatchedDelim {
     pub candidate_span: Option<Span>,
 }
 
-pub(crate) fn parse_token_trees<'sess, 'src>(
-    sess: &'sess ParseSess,
+pub(crate) fn parse_token_trees<'psess, 'src>(
+    psess: &'psess ParseSess,
     mut src: &'src str,
     mut start_pos: BytePos,
     override_span: Option<Span>,
-) -> Result<TokenStream, Vec<Diag<'sess>>> {
+) -> Result<TokenStream, Vec<Diag<'psess>>> {
     // Skip `#!`, if present.
     if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
         src = &src[shebang_len..];
@@ -56,7 +56,7 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
 
     let cursor = Cursor::new(src);
     let string_reader = StringReader {
-        sess,
+        psess,
         start_pos,
         pos: start_pos,
         src,
@@ -75,7 +75,7 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
 
     let mut buffer = Vec::with_capacity(1);
     for unmatched in unmatched_delims {
-        if let Some(err) = make_unclosed_delims_error(unmatched, sess) {
+        if let Some(err) = make_unclosed_delims_error(unmatched, psess) {
             buffer.push(err);
         }
     }
@@ -90,8 +90,8 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
     }
 }
 
-struct StringReader<'sess, 'src> {
-    sess: &'sess ParseSess,
+struct StringReader<'psess, 'src> {
+    psess: &'psess ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
     /// The absolute offset within the source_map of the current character.
@@ -107,9 +107,9 @@ struct StringReader<'sess, 'src> {
     nbsp_is_whitespace: bool,
 }
 
-impl<'sess, 'src> StringReader<'sess, 'src> {
-    pub fn dcx(&self) -> &'sess DiagCtxt {
-        &self.sess.dcx
+impl<'psess, 'src> StringReader<'psess, 'src> {
+    pub fn dcx(&self) -> &'psess DiagCtxt {
+        &self.psess.dcx
     }
 
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
@@ -176,11 +176,11 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 rustc_lexer::TokenKind::RawIdent => {
                     let sym = nfc_normalize(self.str_from(start + BytePos(2)));
                     let span = self.mk_sp(start, self.pos);
-                    self.sess.symbol_gallery.insert(sym, span);
+                    self.psess.symbol_gallery.insert(sym, span);
                     if !sym.can_be_raw() {
                         self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
                     }
-                    self.sess.raw_identifier_spans.push(span);
+                    self.psess.raw_identifier_spans.push(span);
                     token::Ident(sym, IdentIsRaw::Yes)
                 }
                 rustc_lexer::TokenKind::UnknownPrefix => {
@@ -199,7 +199,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                 {
                     let sym = nfc_normalize(self.str_from(start));
                     let span = self.mk_sp(start, self.pos);
-                    self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
+                    self.psess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
                         .push(span);
                     token::Ident(sym, IdentIsRaw::No)
                 }
@@ -230,7 +230,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                     let suffix = if suffix_start < self.pos {
                         let string = self.str_from(suffix_start);
                         if string == "_" {
-                            self.sess
+                            self.psess
                                 .dcx
                                 .emit_err(errors::UnderscoreLiteralSuffix { span: self.mk_sp(suffix_start, self.pos) });
                             None
@@ -338,7 +338,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
     fn ident(&self, start: BytePos) -> TokenKind {
         let sym = nfc_normalize(self.str_from(start));
         let span = self.mk_sp(start, self.pos);
-        self.sess.symbol_gallery.insert(sym, span);
+        self.psess.symbol_gallery.insert(sym, span);
         token::Ident(sym, IdentIsRaw::No)
     }
 
@@ -350,7 +350,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
         let content = self.str_from(content_start);
         if contains_text_flow_control_chars(content) {
             let span = self.mk_sp(start, self.pos);
-            self.sess.buffer_lint_with_diagnostic(
+            self.psess.buffer_lint_with_diagnostic(
                 TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
                 span,
                 ast::CRATE_NODE_ID,
@@ -566,7 +566,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
     }
 
     fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
-        self.sess
+        self.psess
             .dcx
             .struct_span_fatal(
                 self.mk_sp(start, self.pos),
@@ -680,7 +680,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
             self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
         } else {
            // Before Rust 2021, only emit a lint for migration.
-            self.sess.buffer_lint_with_diagnostic(
+            self.psess.buffer_lint_with_diagnostic(
                 RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                 prefix_span,
                 ast::CRATE_NODE_ID,

@@ -8,18 +8,18 @@ use rustc_ast_pretty::pprust::token_to_string;
 use rustc_errors::{Applicability, PErr};
 use rustc_span::symbol::kw;
 
-pub(super) struct TokenTreesReader<'sess, 'src> {
-    string_reader: StringReader<'sess, 'src>,
+pub(super) struct TokenTreesReader<'psess, 'src> {
+    string_reader: StringReader<'psess, 'src>,
     /// The "next" token, which has been obtained from the `StringReader` but
     /// not yet handled by the `TokenTreesReader`.
     token: Token,
     diag_info: TokenTreeDiagInfo,
 }
 
-impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
+impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
     pub(super) fn parse_all_token_trees(
-        string_reader: StringReader<'sess, 'src>,
-    ) -> (TokenStream, Result<(), Vec<PErr<'sess>>>, Vec<UnmatchedDelim>) {
+        string_reader: StringReader<'psess, 'src>,
+    ) -> (TokenStream, Result<(), Vec<PErr<'psess>>>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
             string_reader,
             token: Token::dummy(),
@@ -35,7 +35,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
     fn parse_token_trees(
         &mut self,
         is_delimited: bool,
-    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'sess>>>) {
+    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'psess>>>) {
         // Move past the opening delimiter.
         let (_, open_spacing) = self.bump(false);
 
@@ -71,9 +71,9 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
         }
     }
 
-    fn eof_err(&mut self) -> PErr<'sess> {
+    fn eof_err(&mut self) -> PErr<'psess> {
         let msg = "this file contains an unclosed delimiter";
-        let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
+        let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
         for &(_, sp) in &self.diag_info.open_braces {
             err.span_label(sp, "unclosed delimiter");
             self.diag_info.unmatched_delims.push(UnmatchedDelim {
@@ -89,7 +89,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
             report_suspicious_mismatch_block(
                 &mut err,
                 &self.diag_info,
-                self.string_reader.sess.source_map(),
+                self.string_reader.psess.source_map(),
                 *delim,
             )
         }
@@ -99,7 +99,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
     fn parse_token_tree_open_delim(
         &mut self,
         open_delim: Delimiter,
-    ) -> Result<TokenTree, Vec<PErr<'sess>>> {
+    ) -> Result<TokenTree, Vec<PErr<'psess>>> {
         // The span for beginning of the delimited section
         let pre_span = self.token.span;
 
@@ -115,7 +115,7 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
 
         // Expand to cover the entire delimited token tree
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
-        let sm = self.string_reader.sess.source_map();
+        let sm = self.string_reader.psess.source_map();
 
         let close_spacing = match self.token.kind {
             // Correct delimiter.
@@ -232,11 +232,11 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
     fn unclosed_delim_err(
         &mut self,
         tts: TokenStream,
-        mut errs: Vec<PErr<'sess>>,
-    ) -> Vec<PErr<'sess>> {
+        mut errs: Vec<PErr<'psess>>,
+    ) -> Vec<PErr<'psess>> {
         // If there are unclosed delims, see if there are diff markers and if so, point them
         // out instead of complaining about the unclosed delims.
-        let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
+        let mut parser = crate::stream_to_parser(self.string_reader.psess, tts, None);
         let mut diff_errs = vec![];
         // Suggest removing a `{` we think appears in an `if`/`while` condition
         // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
@@ -289,17 +289,17 @@ impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
         return errs;
     }
 
-    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'sess> {
+    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'psess> {
         // An unexpected closing delimiter (i.e., there is no
         // matching opening delimiter).
         let token_str = token_to_string(&self.token);
         let msg = format!("unexpected closing delimiter: `{token_str}`");
-        let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
+        let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
 
         report_suspicious_mismatch_block(
             &mut err,
             &self.diag_info,
-            self.string_reader.sess.source_map(),
+            self.string_reader.psess.source_map(),
             delim,
         );
         err.span_label(self.token.span, "unexpected closing delimiter");

@@ -350,7 +350,7 @@ pub(super) fn check_for_substitution(
 
     let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
         let msg = format!("substitution character not found for '{ch}'");
-        reader.sess.dcx.span_bug(span, msg);
+        reader.psess.dcx.span_bug(span, msg);
     };
 
     // special help suggestion for "directed" double quotes