Auto merge of #125436 - matthiaskrgr:rollup-uijo2ga, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #122665 (Add some tests for public-private dependencies.)
 - #123623 (Fix OutsideLoop's error suggestion: adding label `'block` for `if` block.)
 - #125054 (Handle `ReVar` in `note_and_explain_region`)
 - #125156 (Expand `for_loops_over_fallibles` lint to lint on fallibles behind references.)
 - #125222 (Migrate `run-make/issue-46239` to `rmake`)
 - #125316 (Tweak `Spacing` use)
 - #125392 (Wrap Context.ext in AssertUnwindSafe)
 - #125417 (self-contained linker: retry linking without `-fuse-ld=lld` on CCs that don't support it)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 39d2f2affd
52 changed files with 931 additions and 158 deletions
@@ -661,11 +661,11 @@ impl TokenStream {
        if attr_style == AttrStyle::Inner {
            vec![
                TokenTree::token_joint(token::Pound, span),
-               TokenTree::token_alone(token::Not, span),
+               TokenTree::token_joint_hidden(token::Not, span),
                body,
            ]
        } else {
-           vec![TokenTree::token_alone(token::Pound, span), body]
+           vec![TokenTree::token_joint_hidden(token::Pound, span), body]
        }
    }
}
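A hypothetical illustration (not part of the diff) of where this spacing change shows up: when a proc macro's input is re-tokenized from the AST and then `Display`ed, attribute tokens go through the token pretty-printer, so `token_joint_hidden` between `#` (and `!`) and the bracketed body keeps the compact `#[attr]` / `#![attr]` form instead of `# [attr]`. The `show` attribute below is an assumed example and requires `crate-type = ["proc-macro"]`:

use proc_macro::TokenStream;

#[proc_macro_attribute]
pub fn show(_args: TokenStream, item: TokenStream) -> TokenStream {
    // `to_string`/`Display` on a rustc-backed `TokenStream` uses the same
    // token pretty-printer discussed in the hunk below.
    eprintln!("{}", item);
    item
}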
@@ -681,22 +681,40 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
        }
    }

    // The easiest way to implement token stream pretty printing would be to
    // print each token followed by a single space. But that would produce ugly
    // output, so we go to some effort to do better.
    //
    // First, we track whether each token that appears in source code is
    // followed by a space, with `Spacing`, and reproduce that in the output.
    // This works well in a lot of cases. E.g. `stringify!(x + y)` produces
    // "x + y" and `stringify!(x+y)` produces "x+y".
    //
    // But this doesn't work for code produced by proc macros (which have no
    // original source text representation) nor for code produced by decl
    // macros (which are tricky because the whitespace after tokens appearing
    // in macro rules isn't always what you want in the produced output). For
    // these we mostly use `Spacing::Alone`, which is the conservative choice.
    //
    // So we have a backup mechanism for when `Spacing::Alone` occurs between a
    // pair of tokens: we check if that pair of tokens can obviously go
    // together without a space between them. E.g. token `x` followed by token
    // `,` is better printed as `x,` than `x ,`. (Even if the original source
    // code was `x ,`.)
    //
    // Finally, we must be careful about changing the output. Token pretty
    // printing is used by `stringify!` and `impl Display for
    // proc_macro::TokenStream`, and some programs rely on the output having a
    // particular form, even though they shouldn't. In particular, some proc
    // macros do `format!({stream})` on a token stream and then "parse" the
    // output with simple string matching that can't handle whitespace changes.
    // E.g. we have seen cases where a proc macro can handle `a :: b` but not
    // `a::b`. See #117433 for some examples.
    fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
        let mut iter = tts.trees().peekable();
        while let Some(tt) = iter.next() {
            let spacing = self.print_tt(tt, convert_dollar_crate);
            if let Some(next) = iter.peek() {
                // Should we print a space after `tt`? There are two guiding
                // factors.
                // - `spacing` is the more important and accurate one. Most
                //   tokens have good spacing information, and
                //   `Joint`/`JointHidden` get used a lot.
                // - `space_between` is the backup. Code produced by proc
                //   macros has worse spacing information, with no
                //   `JointHidden` usage and too much `Alone` usage, which
                //   would result in over-spaced output such as
                //   `( x () , y . z )`. `space_between` avoids some of the
                //   excess whitespace.
                if spacing == Spacing::Alone && space_between(tt, next) {
                    self.space();
                }
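A small illustration (not part of the diff) of the spacing rules the comment above describes; the expected strings follow directly from that comment:

fn main() {
    // `Spacing` reproduces the source's whitespace...
    assert_eq!(stringify!(x + y), "x + y");
    assert_eq!(stringify!(x+y), "x+y");
    // ...and `space_between` drops the space before `,` even if the source had one.
    assert_eq!(stringify!(x , y), "x, y");
}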
@@ -153,7 +153,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
    fn build_panic(&self, expr_str: &str, panic_path: Path) -> P<Expr> {
        let escaped_expr_str = escape_to_fmt(expr_str);
        let initial = [
-           TokenTree::token_joint_hidden(
+           TokenTree::token_joint(
                token::Literal(token::Lit {
                    kind: token::LitKind::Str,
                    symbol: Symbol::intern(&if self.fmt_string.is_empty() {
@@ -172,7 +172,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
        ];
        let captures = self.capture_decls.iter().flat_map(|cap| {
            [
-               TokenTree::token_joint_hidden(
+               TokenTree::token_joint(
                    token::Ident(cap.ident.name, IdentIsRaw::No),
                    cap.ident.span,
                ),
@@ -3,7 +3,7 @@ use crate::deriving::generic::*;
use crate::errors;
use core::ops::ControlFlow;
use rustc_ast as ast;
-use rustc_ast::visit::walk_list;
+use rustc_ast::visit::visit_opt;
use rustc_ast::{attr, EnumDef, VariantData};
use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt};
use rustc_span::symbol::Ident;
@@ -224,7 +224,7 @@ impl<'a, 'b> rustc_ast::visit::Visitor<'a> for DetectNonVariantDefaultAttr<'a, '
        self.visit_ident(v.ident);
        self.visit_vis(&v.vis);
        self.visit_variant_data(&v.data);
-       walk_list!(self, visit_anon_const, &v.disr_expr);
+       visit_opt!(self, visit_anon_const, &v.disr_expr);
        for attr in &v.attrs {
            rustc_ast::visit::walk_attribute(self, attr);
        }
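The `walk_list!` → `visit_opt!` changes in this rollup all target call sites whose argument is an `Option` rather than a list. A rough sketch of the distinction (an illustration, not the actual `rustc_ast::visit` macro):

// `walk_list!` iterates any collection, treating an `Option` as a
// zero-or-one-element list; a visit_opt-style macro states the intent
// directly: visit the value only if it is `Some`.
macro_rules! visit_opt_sketch {
    ($visitor:expr, $method:ident, $opt:expr) => {
        if let Some(x) = $opt {
            $visitor.$method(x);
        }
    };
}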
@@ -799,6 +799,27 @@ fn link_natively(
            continue;
        }

+       // Check if linking failed with an error message that indicates the driver didn't recognize
+       // the `-fuse-ld=lld` option. If so, re-perform the link step without it. This avoids having
+       // to spawn multiple instances on the happy path to do version checking, and ensures things
+       // keep working on the tier 1 baseline of GLIBC 2.17+. That is generally understood as GCCs
+       // circa RHEL/CentOS 7, 4.5 or so, whereas lld support was added in GCC 9.
+       if matches!(flavor, LinkerFlavor::Gnu(Cc::Yes, Lld::Yes))
+           && unknown_arg_regex.is_match(&out)
+           && out.contains("-fuse-ld=lld")
+           && cmd.get_args().iter().any(|e| e.to_string_lossy() == "-fuse-ld=lld")
+       {
+           info!("linker output: {:?}", out);
+           warn!("The linker driver does not support `-fuse-ld=lld`. Retrying without it.");
+           for arg in cmd.take_args() {
+               if arg.to_string_lossy() != "-fuse-ld=lld" {
+                   cmd.arg(arg);
+               }
+           }
+           info!("{:?}", &cmd);
+           continue;
+       }

        // Detect '-static-pie' used with an older version of gcc or clang not supporting it.
        // Fallback from '-static-pie' to '-static' in that case.
        if matches!(flavor, LinkerFlavor::Gnu(Cc::Yes, _))
@@ -68,12 +68,15 @@ pub(crate) enum KleeneOp {
/// `MetaVarExpr` are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, PartialEq, Encodable, Decodable)]
enum TokenTree {
    /// A token. Unlike `tokenstream::TokenTree::Token` this lacks a `Spacing`.
    /// See the comments about `Spacing` in the `transcribe` function.
    Token(Token),
    /// A delimited sequence, e.g. `($e:expr)` (RHS) or `{ $e }` (LHS).
    Delimited(DelimSpan, DelimSpacing, Delimited),
    /// A kleene-style repetition sequence, e.g. `$($e:expr)*` (RHS) or `$($e),*` (LHS).
    Sequence(DelimSpan, SequenceRepetition),
-   /// e.g., `$var`.
+   /// e.g., `$var`. The span covers the leading dollar and the ident. (The span within the ident
+   /// only covers the ident, e.g. `var`.)
    MetaVar(Span, Ident),
    /// e.g., `$var:expr`. Only appears on the LHS.
    MetaVarDecl(Span, Ident /* name to bind */, Option<NonterminalKind>),
@@ -62,7 +62,10 @@ pub(super) fn parse(
        match tree {
            TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
                let span = match trees.next() {
-                   Some(&tokenstream::TokenTree::Token(Token { kind: token::Colon, span }, _)) => {
+                   Some(&tokenstream::TokenTree::Token(
+                       Token { kind: token::Colon, span: colon_span },
+                       _,
+                   )) => {
                        match trees.next() {
                            Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                Some((fragment, _)) => {
@@ -126,10 +129,12 @@ pub(super) fn parse(
                                    }
                                    _ => token.span,
                                },
-                               tree => tree.map_or(span, tokenstream::TokenTree::span),
+                               Some(tree) => tree.span(),
+                               None => colon_span,
                            }
                        }
-                   tree => tree.map_or(start_sp, tokenstream::TokenTree::span),
+                   Some(tree) => tree.span(),
+                   None => start_sp,
                };

                result.push(TokenTree::MetaVarDecl(span, ident, None));
@@ -176,7 +181,7 @@ fn parse_tree<'a>(
    // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
        // `tree` is a `$` token. Look at the next token in `trees`
-       &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _) => {
+       &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span: dollar_span }, _) => {
            // FIXME: Handle `Invisible`-delimited groups in a more systematic way
            // during parsing.
            let mut next = outer_trees.next();
@@ -209,7 +214,7 @@ fn parse_tree<'a>(
                        err.emit();
                        // Returns early the same read `$` to avoid spanning
                        // unrelated diagnostics that could be performed afterwards
-                       return TokenTree::token(token::Dollar, span);
+                       return TokenTree::token(token::Dollar, dollar_span);
                    }
                    Ok(elem) => {
                        maybe_emit_macro_metavar_expr_feature(
@@ -251,7 +256,7 @@ fn parse_tree<'a>(
                // special metavariable that names the crate of the invocation.
                Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
                    let (ident, is_raw) = token.ident().unwrap();
-                   let span = ident.span.with_lo(span.lo());
+                   let span = ident.span.with_lo(dollar_span.lo());
                    if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) {
                        TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                    } else {
@@ -260,16 +265,19 @@ fn parse_tree<'a>(
                }

                // `tree` is followed by another `$`. This is an escaped `$`.
-               Some(&tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }, _)) => {
+               Some(&tokenstream::TokenTree::Token(
+                   Token { kind: token::Dollar, span: dollar_span2 },
+                   _,
+               )) => {
                    if parsing_patterns {
                        span_dollar_dollar_or_metavar_in_the_lhs_err(
                            sess,
-                           &Token { kind: token::Dollar, span },
+                           &Token { kind: token::Dollar, span: dollar_span2 },
                        );
                    } else {
-                       maybe_emit_macro_metavar_expr_feature(features, sess, span);
+                       maybe_emit_macro_metavar_expr_feature(features, sess, dollar_span2);
                    }
-                   TokenTree::token(token::Dollar, span)
+                   TokenTree::token(token::Dollar, dollar_span2)
                }

                // `tree` is followed by some other token. This is an error.
@@ -281,7 +289,7 @@ fn parse_tree<'a>(
                }

                // There are no more tokens. Just return the `$` we already have.
-               None => TokenTree::token(token::Dollar, span),
+               None => TokenTree::token(token::Dollar, dollar_span),
            }
        }

@@ -253,8 +253,23 @@ pub(super) fn transcribe<'a>(
            mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
                // Find the matched nonterminal from the macro invocation, and use it to replace
                // the meta-var.
+               //
+               // We use `Spacing::Alone` everywhere here, because that's the conservative choice
+               // and spacing of declarative macros is tricky. E.g. in this macro:
+               // ```
+               // macro_rules! idents {
+               //     ($($a:ident,)*) => { stringify!($($a)*) }
+               // }
+               // ```
+               // `$a` has no whitespace after it and will be marked `JointHidden`. If you then
+               // call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
+               // if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
+               // producing "xyz", which is bad because it effectively merges tokens.
+               // `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
+               // some of the unnecessary whitespace.
                let ident = MacroRulesNormalizedIdent::new(original_ident);
                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
-                   // njn: explain the use of alone here
                    let tt = match cur_matched {
                        MatchedSingle(ParseNtResult::Tt(tt)) => {
                            // `tt`s are emitted into the output stream directly as "raw tokens",
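A sketch (not part of the diff) of the macro from the comment above; the expected output assumes `Spacing::Alone` plus `space_between`, as that comment explains:

macro_rules! idents {
    ($($a:ident,)*) => { stringify!($($a)*) };
}

fn main() {
    // With joint spacing this could merge into "xyz"; the conservative
    // `Spacing::Alone` keeps the identifiers separate.
    println!("{}", idents!(x, y, z,)); // expected: "x y z"
}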
@@ -309,10 +309,10 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
        use rustc_ast::token::*;

        // The code below is conservative, using `token_alone`/`Spacing::Alone`
-       // in most places. When the resulting code is pretty-printed by
-       // `print_tts` it ends up with spaces between most tokens, which is
-       // safe but ugly. It's hard in general to do better when working at the
-       // token level.
+       // in most places. It's hard in general to do better when working at
+       // the token level. When the resulting code is pretty-printed by
+       // `print_tts` the `space_between` function helps avoid a lot of
+       // unnecessary whitespace, so the results aren't too bad.
        let (tree, rustc) = self;
        match tree {
            TokenTree::Punct(Punct { ch, joint, span }) => {
@@ -6,7 +6,7 @@
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/borrow_check.html

-use rustc_ast::visit::walk_list;
+use rustc_ast::visit::visit_opt;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
@@ -168,7 +168,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h
                hir::StmtKind::Expr(..) | hir::StmtKind::Semi(..) => visitor.visit_stmt(statement),
            }
        }
-       walk_list!(visitor, visit_expr, &blk.expr);
+       visit_opt!(visitor, visit_expr, &blk.expr);
    }

    visitor.cx = prev_cx;
@@ -173,7 +173,10 @@ pub(super) fn note_and_explain_region<'tcx>(

        ty::ReError(_) => return,

-       ty::ReVar(_) | ty::ReBound(..) | ty::ReErased => {
+       // FIXME(#125431): `ReVar` shouldn't reach here.
+       ty::ReVar(_) => (format!("lifetime `{region}`"), alt_span),

+       ty::ReBound(..) | ty::ReErased => {
            bug!("unexpected region for note_and_explain_region: {:?}", region);
        }
    };
@@ -263,7 +263,7 @@ lint_extern_without_abi = extern declarations without an explicit ABI are deprec
    .help = the default ABI is {$default_abi}

lint_for_loops_over_fallibles =
-   for loop over {$article} `{$ty}`. This is more readably written as an `if let` statement
+   for loop over {$article} `{$ref_prefix}{$ty}`. This is more readably written as an `if let` statement
    .suggestion = consider using `if let` to clear intent
    .remove_next = to iterate over `{$recv_snip}` remove the call to `next`
    .use_while_let = to check pattern in a loop use `while let`
@@ -52,14 +52,27 @@ impl<'tcx> LateLintPass<'tcx> for ForLoopsOverFallibles {

        let ty = cx.typeck_results().expr_ty(arg);

-       let &ty::Adt(adt, args) = ty.kind() else { return };
+       let (adt, args, ref_mutability) = match ty.kind() {
+           &ty::Adt(adt, args) => (adt, args, None),
+           &ty::Ref(_, ty, mutability) => match ty.kind() {
+               &ty::Adt(adt, args) => (adt, args, Some(mutability)),
+               _ => return,
+           },
+           _ => return,
+       };

        let (article, ty, var) = match adt.did() {
+           did if cx.tcx.is_diagnostic_item(sym::Option, did) && ref_mutability.is_some() => ("a", "Option", "Some"),
            did if cx.tcx.is_diagnostic_item(sym::Option, did) => ("an", "Option", "Some"),
            did if cx.tcx.is_diagnostic_item(sym::Result, did) => ("a", "Result", "Ok"),
            _ => return,
        };

+       let ref_prefix = match ref_mutability {
+           None => "",
+           Some(ref_mutability) => ref_mutability.ref_prefix_str(),
+       };

        let sub = if let Some(recv) = extract_iterator_next_call(cx, arg)
            && let Ok(recv_snip) = cx.sess().source_map().span_to_snippet(recv.span)
        {
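A hypothetical snippet (not part of the diff) showing what the expanded lint now covers: iterating over a reference to an `Option` (or `Result`) is linted like iterating over the value itself, with the new `{$ref_prefix}` appearing in the message:

fn main() {
    let opt = Some(42);
    // warning: for loop over a `&Option`. This is more readably written as
    // an `if let` statement  [for_loops_over_fallibles]
    for x in &opt {
        println!("{x}");
    }
}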
@@ -85,7 +98,7 @@ impl<'tcx> LateLintPass<'tcx> for ForLoopsOverFallibles {
        cx.emit_span_lint(
            FOR_LOOPS_OVER_FALLIBLES,
            arg.span,
-           ForLoopsOverFalliblesDiag { article, ty, sub, question_mark, suggestion },
+           ForLoopsOverFalliblesDiag { article, ref_prefix, ty, sub, question_mark, suggestion },
        );
    }
}
@@ -620,6 +620,7 @@ pub enum PtrNullChecksDiag<'a> {
#[diag(lint_for_loops_over_fallibles)]
pub struct ForLoopsOverFalliblesDiag<'a> {
    pub article: &'static str,
+   pub ref_prefix: &'static str,
    pub ty: &'static str,
    #[subdiagnostic]
    pub sub: ForLoopsOverFalliblesLoopSub<'a>,
@@ -925,7 +925,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                for subpattern in prefix.iter() {
                    self.visit_primary_bindings(subpattern, pattern_user_ty.clone().index(), f);
                }
-               for subpattern in slice {
+               if let Some(subpattern) = slice {
                    self.visit_primary_bindings(
                        subpattern,
                        pattern_user_ty.clone().subslice(from, to),
@@ -1100,7 +1100,7 @@ pub struct BreakInsideCoroutine<'a> {
pub struct OutsideLoop<'a> {
    #[primary_span]
    #[label]
-   pub span: Span,
+   pub spans: Vec<Span>,
    pub name: &'a str,
    pub is_break: bool,
    #[subdiagnostic]
@@ -1112,7 +1112,7 @@ pub struct OutsideLoopSuggestion {
    #[suggestion_part(code = "'block: ")]
    pub block_span: Span,
    #[suggestion_part(code = " 'block")]
-   pub break_span: Span,
+   pub break_spans: Vec<Span>,
}

#[derive(Diagnostic)]
@@ -1,3 +1,5 @@
+use std::collections::BTreeMap;
+use std::fmt;
use Context::*;

use rustc_hir as hir;
@@ -25,22 +27,55 @@ enum Context {
    Closure(Span),
    Coroutine { coroutine_span: Span, kind: hir::CoroutineDesugaring, source: hir::CoroutineSource },
    UnlabeledBlock(Span),
+   UnlabeledIfBlock(Span),
    LabeledBlock,
    Constant,
}

-#[derive(Copy, Clone)]
+#[derive(Clone)]
+struct BlockInfo {
+   name: String,
+   spans: Vec<Span>,
+   suggs: Vec<Span>,
+}

+#[derive(PartialEq)]
+enum BreakContextKind {
+   Break,
+   Continue,
+}

+impl fmt::Display for BreakContextKind {
+   fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+       match self {
+           BreakContextKind::Break => "break",
+           BreakContextKind::Continue => "continue",
+       }
+       .fmt(f)
+   }
+}

#[derive(Clone)]
struct CheckLoopVisitor<'a, 'tcx> {
    sess: &'a Session,
    tcx: TyCtxt<'tcx>,
-   cx: Context,
+   // Keep track of a stack of contexts, so that suggestions
+   // are not made for contexts where it would be incorrect,
+   // such as adding a label for an `if`.
+   // e.g. `if 'foo: {}` would be incorrect.
+   cx_stack: Vec<Context>,
+   block_breaks: BTreeMap<Span, BlockInfo>,
}

fn check_mod_loops(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
-   tcx.hir().visit_item_likes_in_module(
-       module_def_id,
-       &mut CheckLoopVisitor { sess: tcx.sess, tcx, cx: Normal },
-   );
+   let mut check = CheckLoopVisitor {
+       sess: tcx.sess,
+       tcx,
+       cx_stack: vec![Normal],
+       block_breaks: Default::default(),
+   };
+   tcx.hir().visit_item_likes_in_module(module_def_id, &mut check);
+   check.report_outside_loop_error();
}

pub(crate) fn provide(providers: &mut Providers) {
@@ -83,6 +118,45 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {

    fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) {
        match e.kind {
+           hir::ExprKind::If(cond, then, else_opt) => {
+               self.visit_expr(cond);

+               let get_block = |ck_loop: &CheckLoopVisitor<'a, 'hir>,
+                                expr: &hir::Expr<'hir>|
+                -> Option<&hir::Block<'hir>> {
+                   if let hir::ExprKind::Block(b, None) = expr.kind
+                       && matches!(
+                           ck_loop.cx_stack.last(),
+                           Some(&Normal)
+                               | Some(&Constant)
+                               | Some(&UnlabeledBlock(_))
+                               | Some(&UnlabeledIfBlock(_))
+                       )
+                   {
+                       Some(b)
+                   } else {
+                       None
+                   }
+               };

+               if let Some(b) = get_block(self, then) {
+                   self.with_context(UnlabeledIfBlock(b.span.shrink_to_lo()), |v| {
+                       v.visit_block(b)
+                   });
+               } else {
+                   self.visit_expr(then);
+               }

+               if let Some(else_expr) = else_opt {
+                   if let Some(b) = get_block(self, else_expr) {
+                       self.with_context(UnlabeledIfBlock(b.span.shrink_to_lo()), |v| {
+                           v.visit_block(b)
+                       });
+                   } else {
+                       self.visit_expr(else_expr);
+                   }
+               }
+           }
            hir::ExprKind::Loop(ref b, _, source, _) => {
                self.with_context(Loop(source), |v| v.visit_block(b));
            }
@@ -101,11 +175,14 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {
            hir::ExprKind::Block(ref b, Some(_label)) => {
                self.with_context(LabeledBlock, |v| v.visit_block(b));
            }
-           hir::ExprKind::Block(ref b, None) if matches!(self.cx, Fn) => {
+           hir::ExprKind::Block(ref b, None) if matches!(self.cx_stack.last(), Some(&Fn)) => {
                self.with_context(Normal, |v| v.visit_block(b));
            }
            hir::ExprKind::Block(ref b, None)
-               if matches!(self.cx, Normal | Constant | UnlabeledBlock(_)) =>
+               if matches!(
+                   self.cx_stack.last(),
+                   Some(&Normal) | Some(&Constant) | Some(&UnlabeledBlock(_))
+               ) =>
            {
                self.with_context(UnlabeledBlock(b.span.shrink_to_lo()), |v| v.visit_block(b));
            }
@@ -178,7 +255,12 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {
                    Some(label) => sp_lo.with_hi(label.ident.span.hi()),
                    None => sp_lo.shrink_to_lo(),
                };
-               self.require_break_cx("break", e.span, label_sp);
+               self.require_break_cx(
+                   BreakContextKind::Break,
+                   e.span,
+                   label_sp,
+                   self.cx_stack.len() - 1,
+               );
            }
            hir::ExprKind::Continue(destination) => {
                self.require_label_in_labeled_block(e.span, &destination, "continue");
@@ -200,7 +282,12 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {
                    }
                    Err(_) => {}
                }
-               self.require_break_cx("continue", e.span, e.span)
+               self.require_break_cx(
+                   BreakContextKind::Continue,
+                   e.span,
+                   e.span,
+                   self.cx_stack.len() - 1,
+               )
            }
            _ => intravisit::walk_expr(self, e),
        }
@@ -212,18 +299,26 @@ impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
    where
        F: FnOnce(&mut CheckLoopVisitor<'a, 'hir>),
    {
-       let old_cx = self.cx;
-       self.cx = cx;
+       self.cx_stack.push(cx);
        f(self);
-       self.cx = old_cx;
+       self.cx_stack.pop();
    }

-   fn require_break_cx(&self, name: &str, span: Span, break_span: Span) {
-       let is_break = name == "break";
-       match self.cx {
+   fn require_break_cx(
+       &mut self,
+       br_cx_kind: BreakContextKind,
+       span: Span,
+       break_span: Span,
+       cx_pos: usize,
+   ) {
+       match self.cx_stack[cx_pos] {
            LabeledBlock | Loop(_) => {}
            Closure(closure_span) => {
-               self.sess.dcx().emit_err(BreakInsideClosure { span, closure_span, name });
+               self.sess.dcx().emit_err(BreakInsideClosure {
+                   span,
+                   closure_span,
+                   name: &br_cx_kind.to_string(),
+               });
            }
            Coroutine { coroutine_span, kind, source } => {
                let kind = match kind {
@@ -239,17 +334,32 @@ impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
                self.sess.dcx().emit_err(BreakInsideCoroutine {
                    span,
                    coroutine_span,
-                   name,
+                   name: &br_cx_kind.to_string(),
                    kind,
                    source,
                });
            }
-           UnlabeledBlock(block_span) if is_break && block_span.eq_ctxt(break_span) => {
-               let suggestion = Some(OutsideLoopSuggestion { block_span, break_span });
-               self.sess.dcx().emit_err(OutsideLoop { span, name, is_break, suggestion });
+           UnlabeledBlock(block_span)
+               if br_cx_kind == BreakContextKind::Break && block_span.eq_ctxt(break_span) =>
+           {
+               let block = self.block_breaks.entry(block_span).or_insert_with(|| BlockInfo {
+                   name: br_cx_kind.to_string(),
+                   spans: vec![],
+                   suggs: vec![],
+               });
+               block.spans.push(span);
+               block.suggs.push(break_span);
            }
-           Normal | Constant | Fn | UnlabeledBlock(_) => {
-               self.sess.dcx().emit_err(OutsideLoop { span, name, is_break, suggestion: None });
+           UnlabeledIfBlock(_) if br_cx_kind == BreakContextKind::Break => {
+               self.require_break_cx(br_cx_kind, span, break_span, cx_pos - 1);
+           }
+           Normal | Constant | Fn | UnlabeledBlock(_) | UnlabeledIfBlock(_) => {
+               self.sess.dcx().emit_err(OutsideLoop {
+                   spans: vec![span],
+                   name: &br_cx_kind.to_string(),
+                   is_break: br_cx_kind == BreakContextKind::Break,
+                   suggestion: None,
+               });
            }
        }
    }
@@ -261,7 +371,7 @@ impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
        cf_type: &str,
    ) -> bool {
        if !span.is_desugaring(DesugaringKind::QuestionMark)
-           && self.cx == LabeledBlock
+           && self.cx_stack.last() == Some(&LabeledBlock)
            && label.label.is_none()
        {
            self.sess.dcx().emit_err(UnlabeledInLabeledBlock { span, cf_type });
@@ -269,4 +379,18 @@ impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
        }
        false
    }

+   fn report_outside_loop_error(&mut self) {
+       for (s, block) in &self.block_breaks {
+           self.sess.dcx().emit_err(OutsideLoop {
+               spans: block.spans.clone(),
+               name: &block.name,
+               is_break: true,
+               suggestion: Some(OutsideLoopSuggestion {
+                   block_span: *s,
+                   break_spans: block.suggs.clone(),
+               }),
+           });
+       }
+   }
}
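For context (not part of the diff), a hypothetical program that reaches this diagnostic; with PR #123623 the E0268 error suggests labeling the enclosing block `'block:` and breaking with `break 'block`, rather than mislabeling the `if` block:

fn main() {
    let val = {
        if true {
            break; // error[E0268]: `break` outside of a loop or labeled block
        }
        1
    };
    let _ = val;
}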
@@ -12,7 +12,7 @@ use crate::{Module, ModuleOrUniformRoot, NameBinding, ParentScope, PathResult};
use crate::{ResolutionError, Resolver, Segment, UseError};

use rustc_ast::ptr::P;
-use rustc_ast::visit::{walk_list, AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor};
+use rustc_ast::visit::{visit_opt, walk_list, AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor};
use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_errors::{codes::*, Applicability, DiagArgValue, IntoDiagArg, StashKey};
@@ -3280,7 +3280,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
    fn resolve_local(&mut self, local: &'ast Local) {
        debug!("resolving local ({:?})", local);
        // Resolve the type.
-       walk_list!(self, visit_ty, &local.ty);
+       visit_opt!(self, visit_ty, &local.ty);

        // Resolve the initializer.
        if let Some((init, els)) = local.kind.init_else_opt() {
@@ -3479,8 +3479,8 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
    fn resolve_arm(&mut self, arm: &'ast Arm) {
        self.with_rib(ValueNS, RibKind::Normal, |this| {
            this.resolve_pattern_top(&arm.pat, PatternSource::Match);
-           walk_list!(this, visit_expr, &arm.guard);
-           walk_list!(this, visit_expr, &arm.body);
+           visit_opt!(this, visit_expr, &arm.guard);
+           visit_opt!(this, visit_expr, &arm.body);
        });
    }