Auto merge of #119569 - matthiaskrgr:rollup-4packja, r=matthiaskrgr
Rollup of 10 pull requests

Successful merges:

 - #118521 (Enable address sanitizer for MSVC targets using INFERASANLIBS linker flag)
 - #119026 (std::net::bind using -1 for openbsd which in turn sets it to somaxconn.)
 - #119195 (Make named_asm_labels lint not trigger on unicode and trigger on format args)
 - #119204 (macro_rules: Less hacky heuristic for using `tt` metavariable spans)
 - #119362 (Make `derive(Trait)` suggestion more accurate)
 - #119397 (Recover parentheses in range patterns)
 - #119417 (Uplift some miscellaneous coroutine-specific machinery into `check_closure`)
 - #119539 (Fix typos)
 - #119540 (Don't synthesize host effect args inside trait object types)
 - #119555 (Add codegen test for RVO on MaybeUninit)

r? `@ghost`
`@rustbot` modify labels: rollup
commit f688dd684f

57 changed files with 765 additions and 352 deletions
@@ -26,7 +26,7 @@ use rustc_span::{sym, Span, Symbol, DUMMY_SP};
 use smallvec::{smallvec, SmallVec};
 
 use std::borrow::Cow;
-use std::{cmp, fmt, iter, mem};
+use std::{cmp, fmt, iter};
 
 /// When the main Rust parser encounters a syntax-extension invocation, it
 /// parses the arguments to the invocation as a token tree. This is a very
@@ -81,14 +81,6 @@ impl TokenTree {
         }
     }
 
-    /// Modify the `TokenTree`'s span in-place.
-    pub fn set_span(&mut self, span: Span) {
-        match self {
-            TokenTree::Token(token, _) => token.span = span,
-            TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
-        }
-    }
-
     /// Create a `TokenTree::Token` with alone spacing.
     pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
         TokenTree::Token(Token::new(kind, span), Spacing::Alone)
@@ -461,19 +453,6 @@ impl TokenStream {
         t1.next().is_none() && t2.next().is_none()
     }
 
-    /// Applies the supplied function to each `TokenTree` and its index in `self`, returning a new `TokenStream`
-    ///
-    /// It is equivalent to `TokenStream::new(self.trees().cloned().enumerate().map(|(i, tt)| f(i, tt)).collect())`.
-    pub fn map_enumerated_owned(
-        mut self,
-        mut f: impl FnMut(usize, TokenTree) -> TokenTree,
-    ) -> TokenStream {
-        let owned = Lrc::make_mut(&mut self.0); // clone if necessary
-        // rely on vec's in-place optimizations to avoid another allocation
-        *owned = mem::take(owned).into_iter().enumerate().map(|(i, tree)| f(i, tree)).collect();
-        self
-    }
-
     /// Create a token stream containing a single token with alone spacing. The
     /// spacing used for the final token in a constructed stream doesn't matter
     /// because it's never used. In practice we arbitrarily use
@@ -1434,19 +1434,21 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                 let (bounds, lifetime_bound) = self.with_dyn_type_scope(true, |this| {
                     let bounds =
                         this.arena.alloc_from_iter(bounds.iter().filter_map(|bound| match bound {
-                            GenericBound::Trait(
-                                ty,
-                                TraitBoundModifiers {
-                                    polarity: BoundPolarity::Positive | BoundPolarity::Negative(_),
-                                    constness,
-                                },
-                            ) => Some(this.lower_poly_trait_ref(ty, itctx, *constness)),
-                            // We can safely ignore constness here, since AST validation
-                            // will take care of invalid modifier combinations.
-                            GenericBound::Trait(
-                                _,
-                                TraitBoundModifiers { polarity: BoundPolarity::Maybe(_), .. },
-                            ) => None,
+                            // We can safely ignore constness here since AST validation
+                            // takes care of rejecting invalid modifier combinations and
+                            // const trait bounds in trait object types.
+                            GenericBound::Trait(ty, modifiers) => match modifiers.polarity {
+                                BoundPolarity::Positive | BoundPolarity::Negative(_) => {
+                                    Some(this.lower_poly_trait_ref(
+                                        ty,
+                                        itctx,
+                                        // Still, don't pass along the constness here; we don't want to
+                                        // synthesize any host effect args, it'd only cause problems.
+                                        ast::BoundConstness::Never,
+                                    ))
+                                }
+                                BoundPolarity::Maybe(_) => None,
+                            },
                             GenericBound::Outlives(lifetime) => {
                                 if lifetime_bound.is_none() {
                                     lifetime_bound = Some(this.lower_lifetime(lifetime));
@@ -1186,15 +1186,22 @@ mod win {
     }
 }
 
-fn add_sanitizer_libraries(sess: &Session, crate_type: CrateType, linker: &mut dyn Linker) {
-    // On macOS the runtimes are distributed as dylibs which should be linked to
-    // both executables and dynamic shared objects. Everywhere else the runtimes
-    // are currently distributed as static libraries which should be linked to
-    // executables only.
+fn add_sanitizer_libraries(
+    sess: &Session,
+    flavor: LinkerFlavor,
+    crate_type: CrateType,
+    linker: &mut dyn Linker,
+) {
+    // On macOS and Windows using MSVC the runtimes are distributed as dylibs
+    // which should be linked to both executables and dynamic libraries.
+    // Everywhere else the runtimes are currently distributed as static
+    // libraries which should be linked to executables only.
     let needs_runtime = !sess.target.is_like_android
         && match crate_type {
             CrateType::Executable => true,
-            CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => sess.target.is_like_osx,
+            CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => {
+                sess.target.is_like_osx || sess.target.is_like_msvc
+            }
             CrateType::Rlib | CrateType::Staticlib => false,
         };
 
@@ -1204,26 +1211,31 @@ fn add_sanitizer_libraries(sess: &Session, crate_type: CrateType, linker: &mut d
 
     let sanitizer = sess.opts.unstable_opts.sanitizer;
     if sanitizer.contains(SanitizerSet::ADDRESS) {
-        link_sanitizer_runtime(sess, linker, "asan");
+        link_sanitizer_runtime(sess, flavor, linker, "asan");
     }
     if sanitizer.contains(SanitizerSet::LEAK) {
-        link_sanitizer_runtime(sess, linker, "lsan");
+        link_sanitizer_runtime(sess, flavor, linker, "lsan");
     }
     if sanitizer.contains(SanitizerSet::MEMORY) {
-        link_sanitizer_runtime(sess, linker, "msan");
+        link_sanitizer_runtime(sess, flavor, linker, "msan");
     }
     if sanitizer.contains(SanitizerSet::THREAD) {
-        link_sanitizer_runtime(sess, linker, "tsan");
+        link_sanitizer_runtime(sess, flavor, linker, "tsan");
     }
     if sanitizer.contains(SanitizerSet::HWADDRESS) {
-        link_sanitizer_runtime(sess, linker, "hwasan");
+        link_sanitizer_runtime(sess, flavor, linker, "hwasan");
     }
     if sanitizer.contains(SanitizerSet::SAFESTACK) {
-        link_sanitizer_runtime(sess, linker, "safestack");
+        link_sanitizer_runtime(sess, flavor, linker, "safestack");
     }
 }
 
-fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) {
+fn link_sanitizer_runtime(
+    sess: &Session,
+    flavor: LinkerFlavor,
+    linker: &mut dyn Linker,
+    name: &str,
+) {
     fn find_sanitizer_runtime(sess: &Session, filename: &str) -> PathBuf {
         let session_tlib =
             filesearch::make_target_lib_path(&sess.sysroot, sess.opts.target_triple.triple());
@@ -1254,6 +1266,10 @@ fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) {
         let rpath = path.to_str().expect("non-utf8 component in path");
         linker.args(&["-Wl,-rpath", "-Xlinker", rpath]);
         linker.link_dylib(&filename, false, true);
+    } else if sess.target.is_like_msvc && flavor == LinkerFlavor::Msvc(Lld::No) && name == "asan" {
+        // MSVC provides the `/INFERASANLIBS` argument to automatically find the
+        // compatible ASAN library.
+        linker.arg("/INFERASANLIBS");
     } else {
         let filename = format!("librustc{channel}_rt.{name}.a");
         let path = find_sanitizer_runtime(sess, &filename).join(&filename);
@@ -2076,7 +2092,7 @@ fn linker_with_args<'a>(
     );
 
     // Sanitizer libraries.
-    add_sanitizer_libraries(sess, crate_type, cmd);
+    add_sanitizer_libraries(sess, flavor, crate_type, cmd);
 
     // Object code from the current crate.
     // Take careful note of the ordering of the arguments we pass to the linker
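For context on the `/INFERASANLIBS` change above (PR #118521), here is a minimal way one might exercise AddressSanitizer on an MSVC target. The invocation and the overflow example are illustrative assumptions, not part of this commit.

```rust
// Assumed build command (nightly toolchain on Windows):
//   rustc -Zsanitizer=address --target x86_64-pc-windows-msvc asan_demo.rs
// With the change above, rustc asks link.exe to infer the matching ASan
// runtime via /INFERASANLIBS instead of linking a runtime from rustc's sysroot.
fn main() {
    let xs = vec![0u8; 4];
    let p = xs.as_ptr();
    // Heap out-of-bounds read that AddressSanitizer should report at runtime.
    let oob = unsafe { *p.add(4) };
    println!("{oob}");
}
```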
@@ -1698,7 +1698,7 @@ pub enum Level {
     /// internal overflows, some file operation errors.
     ///
     /// Its `EmissionGuarantee` is `FatalAbort`, except in the non-aborting "almost fatal" case
-    /// that is occasionaly used, where it is `FatalError`.
+    /// that is occasionally used, where it is `FatalError`.
     Fatal,
 
     /// An error in the code being compiled, which prevents compilation from finishing. This is the
@@ -10,7 +10,7 @@ use crate::mbe::transcribe::transcribe;
 
 use rustc_ast as ast;
 use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, TransparencyError};
@@ -213,7 +213,7 @@ fn expand_macro<'cx>(
             let arm_span = rhses[i].span();
 
             // rhs has holes ( `$id` and `$(...)` that need filled)
-            let mut tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) {
+            let tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) {
                 Ok(tts) => tts,
                 Err(mut err) => {
                     err.emit();
@@ -221,37 +221,6 @@ fn expand_macro<'cx>(
                 }
             };
 
-            // Replace all the tokens for the corresponding positions in the macro, to maintain
-            // proper positions in error reporting, while maintaining the macro_backtrace.
-            if tts.len() == rhs.tts.len() {
-                tts = tts.map_enumerated_owned(|i, mut tt| {
-                    let rhs_tt = &rhs.tts[i];
-                    let ctxt = tt.span().ctxt();
-                    match (&mut tt, rhs_tt) {
-                        // preserve the delim spans if able
-                        (
-                            TokenTree::Delimited(target_sp, ..),
-                            mbe::TokenTree::Delimited(source_sp, ..),
-                        ) => {
-                            target_sp.open = source_sp.open.with_ctxt(ctxt);
-                            target_sp.close = source_sp.close.with_ctxt(ctxt);
-                        }
-                        (
-                            TokenTree::Delimited(target_sp, ..),
-                            mbe::TokenTree::MetaVar(source_sp, ..),
-                        ) => {
-                            target_sp.open = source_sp.with_ctxt(ctxt);
-                            target_sp.close = source_sp.with_ctxt(ctxt).shrink_to_hi();
-                        }
-                        _ => {
-                            let sp = rhs_tt.span().with_ctxt(ctxt);
-                            tt.set_span(sp);
-                        }
-                    }
-                    tt
-                });
-            }
-
             if cx.trace_macros() {
                 let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                 trace_macros_note(&mut cx.expansions, sp, msg);
@@ -4,7 +4,7 @@ use crate::errors::{
     NoSyntaxVarsExprRepeat, VarStillRepeating,
 };
 use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree, NamedMatch};
-use crate::mbe::{self, MetaVarExpr};
+use crate::mbe::{self, KleeneOp, MetaVarExpr};
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
@@ -42,6 +42,7 @@ enum Frame<'a> {
         tts: &'a [mbe::TokenTree],
         idx: usize,
         sep: Option<Token>,
+        kleene_op: KleeneOp,
     },
 }
 
@@ -207,7 +208,7 @@ pub(super) fn transcribe<'a>(
 
                 // Is the repetition empty?
                 if len == 0 {
-                    if seq.kleene.op == mbe::KleeneOp::OneOrMore {
+                    if seq.kleene.op == KleeneOp::OneOrMore {
                         // FIXME: this really ought to be caught at macro definition
                         // time... It happens when the Kleene operator in the matcher and
                         // the body for the same meta-variable do not match.
@@ -227,6 +228,7 @@ pub(super) fn transcribe<'a>(
                         idx: 0,
                         sep: seq.separator.clone(),
                         tts: &delimited.tts,
+                        kleene_op: seq.kleene.op,
                     });
                 }
             }
@@ -243,7 +245,7 @@ pub(super) fn transcribe<'a>(
                         MatchedTokenTree(tt) => {
                             // `tt`s are emitted into the output stream directly as "raw tokens",
                             // without wrapping them into groups.
-                            result.push(tt.clone());
+                            result.push(maybe_use_metavar_location(cx, &stack, sp, tt));
                         }
                         MatchedNonterminal(nt) => {
                             // Other variables are emitted into the output stream as groups with
@@ -308,6 +310,62 @@ pub(super) fn transcribe<'a>(
     }
 }
 
+/// Usually metavariables `$var` produce interpolated tokens, which have an additional place for
+/// keeping both the original span and the metavariable span. For `tt` metavariables that's not the
+/// case however, and there's no place for keeping a second span. So we try to give the single
+/// produced span a location that would be most useful in practice (the hygiene part of the span
+/// must not be changed).
+///
+/// Different locations are useful for different purposes:
+/// - The original location is useful when we need to report a diagnostic for the original token in
+///   isolation, without combining it with any surrounding tokens. This case occurs, but it is not
+///   very common in practice.
+/// - The metavariable location is useful when we need to somehow combine the token span with spans
+///   of its surrounding tokens. This is the most common way to use token spans.
+///
+/// So this function replaces the original location with the metavariable location in all cases
+/// except these two:
+/// - The metavariable is an element of undelimited sequence `$($tt)*`.
+///   These are typically used for passing larger amounts of code, and tokens in that code usually
+///   combine with each other and not with tokens outside of the sequence.
+/// - The metavariable span comes from a different crate, then we prefer the more local span.
+///
+/// FIXME: Find a way to keep both original and metavariable spans for all tokens without
+/// regressing compilation time too much. Several experiments for adding such spans were made in
+/// the past (PR #95580, #118517, #118671) and all showed some regressions.
+fn maybe_use_metavar_location(
+    cx: &ExtCtxt<'_>,
+    stack: &[Frame<'_>],
+    metavar_span: Span,
+    orig_tt: &TokenTree,
+) -> TokenTree {
+    let undelimited_seq = matches!(
+        stack.last(),
+        Some(Frame::Sequence {
+            tts: [_],
+            sep: None,
+            kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
+            ..
+        })
+    );
+    if undelimited_seq || cx.source_map().is_imported(metavar_span) {
+        return orig_tt.clone();
+    }
+
+    match orig_tt {
+        TokenTree::Token(Token { kind, span }, spacing) => {
+            let span = metavar_span.with_ctxt(span.ctxt());
+            TokenTree::Token(Token { kind: kind.clone(), span }, *spacing)
+        }
+        TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
+            let open = metavar_span.shrink_to_lo().with_ctxt(dspan.open.ctxt());
+            let close = metavar_span.shrink_to_hi().with_ctxt(dspan.close.ctxt());
+            let dspan = DelimSpan::from_pair(open, close);
+            TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
+        }
+    }
+}
+
 /// Lookup the meta-var named `ident` and return the matched token tree from the invocation using
 /// the set of matches `interpolations`.
 ///
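As a rough illustration of what `maybe_use_metavar_location` affects (an assumed example, not taken from the commit): when a `tt` fragment is substituted, the produced token now usually carries the metavariable's location, so diagnostics that combine it with neighbouring tokens can point into the macro body rather than at an isolated call-site token.

```rust
// Hypothetical example: `$cond` below is a single `tt`. After substitution, the token
// it produces is re-spanned to the `$cond` use inside the macro body (keeping its
// hygiene context), unless it is part of an undelimited `$($tt)*`-style sequence or
// the metavariable span comes from another crate.
macro_rules! check {
    ($cond:tt) => {
        if $cond {
            println!("ok");
        }
    };
}

fn main() {
    check!(true);
    // A type error such as `check!(1)` would be reported with spans that can combine
    // `$cond` with the surrounding `if` tokens from the macro body.
}
```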
@@ -28,10 +28,10 @@ use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode};
 pub(super) fn check_fn<'a, 'tcx>(
     fcx: &mut FnCtxt<'a, 'tcx>,
     fn_sig: ty::FnSig<'tcx>,
+    coroutine_types: Option<CoroutineTypes<'tcx>>,
     decl: &'tcx hir::FnDecl<'tcx>,
     fn_def_id: LocalDefId,
     body: &'tcx hir::Body<'tcx>,
-    closure_kind: Option<hir::ClosureKind>,
     params_can_be_unsized: bool,
 ) -> Option<CoroutineTypes<'tcx>> {
     let fn_id = fcx.tcx.local_def_id_to_hir_id(fn_def_id);
@@ -49,54 +49,13 @@ pub(super) fn check_fn<'a, 'tcx>(
         fcx.param_env,
     ));
 
+    fcx.coroutine_types = coroutine_types;
     fcx.ret_coercion = Some(RefCell::new(CoerceMany::new(ret_ty)));
 
     let span = body.value.span;
 
     forbid_intrinsic_abi(tcx, span, fn_sig.abi);
 
-    if let Some(hir::ClosureKind::Coroutine(kind)) = closure_kind {
-        let yield_ty = match kind {
-            hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _)
-            | hir::CoroutineKind::Coroutine(_) => {
-                let yield_ty = fcx.next_ty_var(TypeVariableOrigin {
-                    kind: TypeVariableOriginKind::TypeInference,
-                    span,
-                });
-                fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType);
-                yield_ty
-            }
-            // HACK(-Ztrait-solver=next): In the *old* trait solver, we must eagerly
-            // guide inference on the yield type so that we can handle `AsyncIterator`
-            // in this block in projection correctly. In the new trait solver, it is
-            // not a problem.
-            hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
-                let yield_ty = fcx.next_ty_var(TypeVariableOrigin {
-                    kind: TypeVariableOriginKind::TypeInference,
-                    span,
-                });
-                fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType);
-
-                Ty::new_adt(
-                    tcx,
-                    tcx.adt_def(tcx.require_lang_item(hir::LangItem::Poll, Some(span))),
-                    tcx.mk_args(&[Ty::new_adt(
-                        tcx,
-                        tcx.adt_def(tcx.require_lang_item(hir::LangItem::Option, Some(span))),
-                        tcx.mk_args(&[yield_ty.into()]),
-                    )
-                    .into()]),
-                )
-            }
-            hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => Ty::new_unit(tcx),
-        };
-
-        // Resume type defaults to `()` if the coroutine has no argument.
-        let resume_ty = fn_sig.inputs().get(0).copied().unwrap_or_else(|| Ty::new_unit(tcx));
-
-        fcx.resume_yield_tys = Some((resume_ty, yield_ty));
-    }
-
     GatherLocalsVisitor::new(fcx).visit_body(body);
 
     // C-variadic fns also have a `VaList` input that's not listed in `fn_sig`
@@ -147,25 +106,6 @@ pub(super) fn check_fn<'a, 'tcx>(
     fcx.require_type_is_sized(declared_ret_ty, return_or_body_span, traits::SizedReturnType);
     fcx.check_return_expr(body.value, false);
 
-    // We insert the deferred_coroutine_interiors entry after visiting the body.
-    // This ensures that all nested coroutines appear before the entry of this coroutine.
-    // resolve_coroutine_interiors relies on this property.
-    let coroutine_ty = if let Some(hir::ClosureKind::Coroutine(coroutine_kind)) = closure_kind {
-        let interior = fcx
-            .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span });
-        fcx.deferred_coroutine_interiors.borrow_mut().push((
-            fn_def_id,
-            body.id(),
-            interior,
-            coroutine_kind,
-        ));
-
-        let (resume_ty, yield_ty) = fcx.resume_yield_tys.unwrap();
-        Some(CoroutineTypes { resume_ty, yield_ty, interior })
-    } else {
-        None
-    };
-
     // Finalize the return check by taking the LUB of the return types
     // we saw and assigning it to the expected return type. This isn't
     // really expected to fail, since the coercions would have failed
@@ -201,7 +141,7 @@ pub(super) fn check_fn<'a, 'tcx>(
         check_lang_start_fn(tcx, fn_sig, fn_def_id);
     }
 
-    coroutine_ty
+    fcx.coroutine_types
 }
 
 fn check_panic_info_fn(tcx: TyCtxt<'_>, fn_id: LocalDefId, fn_sig: ty::FnSig<'_>) {
@@ -72,7 +72,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         opt_kind: Option<ty::ClosureKind>,
         expected_sig: Option<ExpectedSig<'tcx>>,
     ) -> Ty<'tcx> {
-        let body = self.tcx.hir().body(closure.body);
+        let tcx = self.tcx;
+        let body = tcx.hir().body(closure.body);
 
         trace!("decl = {:#?}", closure.fn_decl);
         let expr_def_id = closure.def_id;
@@ -83,81 +84,151 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 
         debug!(?bound_sig, ?liberated_sig);
 
+        // FIXME: We could probably actually just unify this further --
+        // instead of having a `FnSig` and a `Option<CoroutineTypes>`,
+        // we can have a `ClosureSignature { Coroutine { .. }, Closure { .. } }`,
+        // similar to how `ty::GenSig` is a distinct data structure.
+        let coroutine_types = match closure.kind {
+            hir::ClosureKind::Closure => None,
+            hir::ClosureKind::Coroutine(kind) => {
+                let yield_ty = match kind {
+                    hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _)
+                    | hir::CoroutineKind::Coroutine(_) => {
+                        let yield_ty = self.next_ty_var(TypeVariableOrigin {
+                            kind: TypeVariableOriginKind::TypeInference,
+                            span: expr_span,
+                        });
+                        self.require_type_is_sized(yield_ty, expr_span, traits::SizedYieldType);
+                        yield_ty
+                    }
+                    // HACK(-Ztrait-solver=next): In the *old* trait solver, we must eagerly
+                    // guide inference on the yield type so that we can handle `AsyncIterator`
+                    // in this block in projection correctly. In the new trait solver, it is
+                    // not a problem.
+                    hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
+                        let yield_ty = self.next_ty_var(TypeVariableOrigin {
+                            kind: TypeVariableOriginKind::TypeInference,
+                            span: expr_span,
+                        });
+                        self.require_type_is_sized(yield_ty, expr_span, traits::SizedYieldType);
+
+                        Ty::new_adt(
+                            tcx,
+                            tcx.adt_def(
+                                tcx.require_lang_item(hir::LangItem::Poll, Some(expr_span)),
+                            ),
+                            tcx.mk_args(&[Ty::new_adt(
+                                tcx,
+                                tcx.adt_def(
+                                    tcx.require_lang_item(hir::LangItem::Option, Some(expr_span)),
+                                ),
+                                tcx.mk_args(&[yield_ty.into()]),
+                            )
+                            .into()]),
+                        )
+                    }
+                    hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
+                        tcx.types.unit
+                    }
+                };
+
+                // Resume type defaults to `()` if the coroutine has no argument.
+                let resume_ty = liberated_sig.inputs().get(0).copied().unwrap_or(tcx.types.unit);
+
+                Some(CoroutineTypes { resume_ty, yield_ty })
+            }
+        };
+
         let mut fcx = FnCtxt::new(self, self.param_env, closure.def_id);
-        let coroutine_types = check_fn(
+        check_fn(
             &mut fcx,
             liberated_sig,
+            coroutine_types,
             closure.fn_decl,
             expr_def_id,
             body,
-            Some(closure.kind),
             // Closure "rust-call" ABI doesn't support unsized params
             false,
         );
 
-        let parent_args = GenericArgs::identity_for_item(
-            self.tcx,
-            self.tcx.typeck_root_def_id(expr_def_id.to_def_id()),
-        );
+        let parent_args =
+            GenericArgs::identity_for_item(tcx, tcx.typeck_root_def_id(expr_def_id.to_def_id()));
 
         let tupled_upvars_ty = self.next_root_ty_var(TypeVariableOrigin {
             kind: TypeVariableOriginKind::ClosureSynthetic,
-            span: self.tcx.def_span(expr_def_id),
+            span: expr_span,
         });
 
-        if let Some(CoroutineTypes { resume_ty, yield_ty, interior }) = coroutine_types {
-            let coroutine_args = ty::CoroutineArgs::new(
-                self.tcx,
-                ty::CoroutineArgsParts {
-                    parent_args,
-                    resume_ty,
-                    yield_ty,
-                    return_ty: liberated_sig.output(),
-                    witness: interior,
-                    tupled_upvars_ty,
-                },
-            );
+        match closure.kind {
+            hir::ClosureKind::Closure => {
+                assert_eq!(coroutine_types, None);
+                // Tuple up the arguments and insert the resulting function type into
+                // the `closures` table.
+                let sig = bound_sig.map_bound(|sig| {
+                    tcx.mk_fn_sig(
+                        [Ty::new_tup(tcx, sig.inputs())],
+                        sig.output(),
+                        sig.c_variadic,
+                        sig.unsafety,
+                        sig.abi,
+                    )
+                });
 
-            return Ty::new_coroutine(self.tcx, expr_def_id.to_def_id(), coroutine_args.args);
+                debug!(?sig, ?opt_kind);
+
+                let closure_kind_ty = match opt_kind {
+                    Some(kind) => Ty::from_closure_kind(tcx, kind),
+
+                    // Create a type variable (for now) to represent the closure kind.
+                    // It will be unified during the upvar inference phase (`upvar.rs`)
+                    None => self.next_root_ty_var(TypeVariableOrigin {
+                        // FIXME(eddyb) distinguish closure kind inference variables from the rest.
+                        kind: TypeVariableOriginKind::ClosureSynthetic,
+                        span: expr_span,
+                    }),
+                };
+
+                let closure_args = ty::ClosureArgs::new(
+                    tcx,
+                    ty::ClosureArgsParts {
+                        parent_args,
+                        closure_kind_ty,
+                        closure_sig_as_fn_ptr_ty: Ty::new_fn_ptr(tcx, sig),
+                        tupled_upvars_ty,
+                    },
+                );
+
+                Ty::new_closure(tcx, expr_def_id.to_def_id(), closure_args.args)
+            }
+            hir::ClosureKind::Coroutine(_) => {
+                let Some(CoroutineTypes { resume_ty, yield_ty }) = coroutine_types else {
+                    bug!("expected coroutine to have yield/resume types");
+                };
+                let interior = fcx.next_ty_var(TypeVariableOrigin {
+                    kind: TypeVariableOriginKind::MiscVariable,
+                    span: body.value.span,
+                });
+                fcx.deferred_coroutine_interiors.borrow_mut().push((
+                    expr_def_id,
+                    body.id(),
+                    interior,
+                ));
+
+                let coroutine_args = ty::CoroutineArgs::new(
+                    tcx,
+                    ty::CoroutineArgsParts {
+                        parent_args,
+                        resume_ty,
+                        yield_ty,
+                        return_ty: liberated_sig.output(),
+                        witness: interior,
+                        tupled_upvars_ty,
+                    },
+                );
+
+                Ty::new_coroutine(tcx, expr_def_id.to_def_id(), coroutine_args.args)
+            }
         }
-
-        // Tuple up the arguments and insert the resulting function type into
-        // the `closures` table.
-        let sig = bound_sig.map_bound(|sig| {
-            self.tcx.mk_fn_sig(
-                [Ty::new_tup(self.tcx, sig.inputs())],
-                sig.output(),
-                sig.c_variadic,
-                sig.unsafety,
-                sig.abi,
-            )
-        });
-
-        debug!(?sig, ?opt_kind);
-
-        let closure_kind_ty = match opt_kind {
-            Some(kind) => Ty::from_closure_kind(self.tcx, kind),
-
-            // Create a type variable (for now) to represent the closure kind.
-            // It will be unified during the upvar inference phase (`upvar.rs`)
-            None => self.next_root_ty_var(TypeVariableOrigin {
-                // FIXME(eddyb) distinguish closure kind inference variables from the rest.
-                kind: TypeVariableOriginKind::ClosureSynthetic,
-                span: expr_span,
-            }),
-        };
-
-        let closure_args = ty::ClosureArgs::new(
-            self.tcx,
-            ty::ClosureArgsParts {
-                parent_args,
-                closure_kind_ty,
-                closure_sig_as_fn_ptr_ty: Ty::new_fn_ptr(self.tcx, sig),
-                tupled_upvars_ty,
-            },
-        );
-
-        Ty::new_closure(self.tcx, expr_def_id.to_def_id(), closure_args.args)
     }
 
     /// Given the expected type, figures out what it can about this closure we
@@ -15,6 +15,7 @@ use crate::errors::{
 use crate::fatally_break_rust;
 use crate::method::SelfSource;
 use crate::type_error_struct;
+use crate::CoroutineTypes;
 use crate::Expectation::{self, ExpectCastableToType, ExpectHasType, NoExpectation};
 use crate::{
     report_unexpected_variant_res, BreakableCtxt, Diverges, FnCtxt, Needs,
@@ -3163,8 +3164,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         value: &'tcx hir::Expr<'tcx>,
         expr: &'tcx hir::Expr<'tcx>,
     ) -> Ty<'tcx> {
-        match self.resume_yield_tys {
-            Some((resume_ty, yield_ty)) => {
+        match self.coroutine_types {
+            Some(CoroutineTypes { resume_ty, yield_ty }) => {
                 self.check_expr_coercible_to_type(value, yield_ty, None);
 
                 resume_ty
@@ -85,7 +85,7 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
             return false;
         }
 
-        // not setting the `fallback_has_occured` field here because
+        // not setting the `fallback_has_occurred` field here because
         // that field is only used for type fallback diagnostics.
         for effect in unsolved_effects {
             let expected = self.tcx.consts.true_;
@@ -534,7 +534,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let coroutines = std::mem::take(&mut *self.deferred_coroutine_interiors.borrow_mut());
         debug!(?coroutines);
 
-        for &(expr_def_id, body_id, interior, _) in coroutines.iter() {
+        for &(expr_def_id, body_id, interior) in coroutines.iter() {
             debug!(?expr_def_id);
 
             // Create the `CoroutineWitness` type that we will unify with `interior`.
@@ -5,7 +5,7 @@ mod checks;
 mod suggestions;
 
 use crate::coercion::DynamicCoerceMany;
-use crate::{Diverges, EnclosingBreakables, Inherited};
+use crate::{CoroutineTypes, Diverges, EnclosingBreakables, Inherited};
 use rustc_errors::{DiagCtxt, ErrorGuaranteed};
 use rustc_hir as hir;
 use rustc_hir::def_id::{DefId, LocalDefId};
@@ -68,7 +68,7 @@ pub struct FnCtxt<'a, 'tcx> {
     /// First span of a return site that we find. Used in error messages.
    pub(super) ret_coercion_span: Cell<Option<Span>>,
 
-    pub(super) resume_yield_tys: Option<(Ty<'tcx>, Ty<'tcx>)>,
+    pub(super) coroutine_types: Option<CoroutineTypes<'tcx>>,
 
     /// Whether the last checked node generates a divergence (e.g.,
     /// `return` will set this to `Always`). In general, when entering
@@ -122,7 +122,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             err_count_on_creation: inh.tcx.dcx().err_count(),
             ret_coercion: None,
             ret_coercion_span: Cell::new(None),
-            resume_yield_tys: None,
+            coroutine_types: None,
             diverges: Cell::new(Diverges::Maybe),
             enclosing_breakables: RefCell::new(EnclosingBreakables {
                 stack: Vec::new(),
@@ -55,8 +55,7 @@ pub struct Inherited<'tcx> {
 
     pub(super) deferred_asm_checks: RefCell<Vec<(&'tcx hir::InlineAsm<'tcx>, hir::HirId)>>,
 
-    pub(super) deferred_coroutine_interiors:
-        RefCell<Vec<(LocalDefId, hir::BodyId, Ty<'tcx>, hir::CoroutineKind)>>,
+    pub(super) deferred_coroutine_interiors: RefCell<Vec<(LocalDefId, hir::BodyId, Ty<'tcx>)>>,
 
     /// Whenever we introduce an adjustment from `!` into a type variable,
     /// we record that type variable here. This is later used to inform
@@ -193,7 +193,7 @@ fn typeck_with_fallback<'tcx>(
         let fn_sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), fn_sig);
         let fn_sig = fcx.normalize(body.value.span, fn_sig);
 
-        check_fn(&mut fcx, fn_sig, decl, def_id, body, None, tcx.features().unsized_fn_params);
+        check_fn(&mut fcx, fn_sig, None, decl, def_id, body, tcx.features().unsized_fn_params);
     } else {
         let expected_type = if let Some(&hir::Ty { kind: hir::TyKind::Infer, span, .. }) = body_ty {
             Some(fcx.next_ty_var(TypeVariableOrigin {
@@ -295,15 +295,13 @@ fn typeck_with_fallback<'tcx>(
 /// When `check_fn` is invoked on a coroutine (i.e., a body that
 /// includes yield), it returns back some information about the yield
 /// points.
+#[derive(Debug, PartialEq, Copy, Clone)]
 struct CoroutineTypes<'tcx> {
     /// Type of coroutine argument / values returned by `yield`.
     resume_ty: Ty<'tcx>,
 
     /// Type of value that is yielded.
     yield_ty: Ty<'tcx>,
-
-    /// Types that are captured (see `CoroutineInterior` for more).
-    interior: Ty<'tcx>,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
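For readers unfamiliar with the two fields kept in `CoroutineTypes`, a small nightly-only sketch (adapted from the std `Coroutine` docs, not part of this diff): `resume_ty` is the type passed to `resume`, and `yield_ty` is the type of each `yield`ed value.

```rust
// Nightly-only illustration; depending on the nightly version the closure may
// additionally need to be marked `#[coroutine]`.
#![feature(coroutines, coroutine_trait)]

use std::ops::{Coroutine, CoroutineState};
use std::pin::Pin;

fn main() {
    // resume type: `()`, yield type: `i32`, return type: `&'static str`
    let mut co = || {
        yield 1;
        "done"
    };

    assert!(matches!(Pin::new(&mut co).resume(()), CoroutineState::Yielded(1)));
    assert!(matches!(Pin::new(&mut co).resume(()), CoroutineState::Complete("done")));
}
```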
@@ -2252,6 +2252,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         &self,
         err: &mut Diagnostic,
         errors: Vec<FulfillmentError<'tcx>>,
+        suggest_derive: bool,
     ) {
         let all_local_types_needing_impls =
             errors.iter().all(|e| match e.obligation.predicate.kind().skip_binder() {
@@ -2322,10 +2323,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .iter()
             .map(|e| (e.obligation.predicate, None, Some(e.obligation.cause.clone())))
             .collect();
-        self.suggest_derive(err, &preds);
+        if suggest_derive {
+            self.suggest_derive(err, &preds);
+        } else {
+            // The predicate comes from a binop where the lhs and rhs have different types.
+            let _ = self.note_predicate_source_and_get_derives(err, &preds);
+        }
     }
 
-    pub fn suggest_derive(
+    fn note_predicate_source_and_get_derives(
         &self,
         err: &mut Diagnostic,
         unsatisfied_predicates: &[(
@@ -2333,7 +2339,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             Option<ty::Predicate<'tcx>>,
             Option<ObligationCause<'tcx>>,
         )],
-    ) {
+    ) -> Vec<(String, Span, Symbol)> {
         let mut derives = Vec::<(String, Span, Symbol)>::new();
         let mut traits = Vec::new();
         for (pred, _, _) in unsatisfied_predicates {
@@ -2382,21 +2388,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         traits.sort();
         traits.dedup();
 
-        derives.sort();
-        derives.dedup();
-
-        let mut derives_grouped = Vec::<(String, Span, String)>::new();
-        for (self_name, self_span, trait_name) in derives.into_iter() {
-            if let Some((last_self_name, _, ref mut last_trait_names)) = derives_grouped.last_mut()
-            {
-                if last_self_name == &self_name {
-                    last_trait_names.push_str(format!(", {trait_name}").as_str());
-                    continue;
-                }
-            }
-            derives_grouped.push((self_name, self_span, trait_name.to_string()));
-        }
-
         let len = traits.len();
         if len > 0 {
             let span =
@@ -2419,6 +2410,34 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             );
         }
 
+        derives
+    }
+
+    pub(crate) fn suggest_derive(
+        &self,
+        err: &mut Diagnostic,
+        unsatisfied_predicates: &[(
+            ty::Predicate<'tcx>,
+            Option<ty::Predicate<'tcx>>,
+            Option<ObligationCause<'tcx>>,
+        )],
+    ) {
+        let mut derives = self.note_predicate_source_and_get_derives(err, unsatisfied_predicates);
+        derives.sort();
+        derives.dedup();
+
+        let mut derives_grouped = Vec::<(String, Span, String)>::new();
+        for (self_name, self_span, trait_name) in derives.into_iter() {
+            if let Some((last_self_name, _, ref mut last_trait_names)) = derives_grouped.last_mut()
+            {
+                if last_self_name == &self_name {
+                    last_trait_names.push_str(format!(", {trait_name}").as_str());
+                    continue;
+                }
+            }
+            derives_grouped.push((self_name, self_span, trait_name.to_string()));
+        }
+
         for (self_name, self_span, traits) in &derives_grouped {
             err.span_suggestion_verbose(
                 self_span.shrink_to_lo(),
@@ -318,7 +318,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     lhs_expr.span,
                     format!("cannot use `{}=` on type `{}`", op.node.as_str(), lhs_ty),
                 );
-                self.note_unmet_impls_on_type(&mut err, errors);
+                self.note_unmet_impls_on_type(&mut err, errors, false);
                 (err, None)
             }
             IsAssign::No => {
|
||||||
err.span_label(lhs_expr.span, lhs_ty.to_string());
|
err.span_label(lhs_expr.span, lhs_ty.to_string());
|
||||||
err.span_label(rhs_expr.span, rhs_ty.to_string());
|
err.span_label(rhs_expr.span, rhs_ty.to_string());
|
||||||
}
|
}
|
||||||
self.note_unmet_impls_on_type(&mut err, errors);
|
let suggest_derive = self.can_eq(self.param_env, lhs_ty, rhs_ty);
|
||||||
|
self.note_unmet_impls_on_type(&mut err, errors, suggest_derive);
|
||||||
(err, output_def_id)
|
(err, output_def_id)
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@@ -852,7 +853,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             Str | Never | Char | Tuple(_) | Array(_, _) => {}
             Ref(_, lty, _) if *lty.kind() == Str => {}
             _ => {
-                self.note_unmet_impls_on_type(&mut err, errors);
+                self.note_unmet_impls_on_type(&mut err, errors, true);
             }
         }
     }
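A rough illustration of the behaviour carried by the new `suggest_derive` flag (an assumed example, not taken from the PR's tests): when the two operands of a binary operator have different types, `can_eq` fails and a `#[derive(...)]` suggestion could not fix the error, so only the unmet-impl note is emitted.

```rust
// Illustrative only, and intentionally does not compile: deriving `PartialEq` on
// either type would only give `Meters: PartialEq<Meters>`, never
// `Meters: PartialEq<Feet>`, so the derive suggestion is now skipped here.
struct Meters(f64);
struct Feet(f64);

fn main() {
    let m = Meters(1.0);
    let f = Feet(3.3);
    let _same = m == f; // error[E0369]: binary operation `==` cannot be applied to type `Meters`
}
```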
@@ -2734,10 +2734,13 @@ impl<'tcx> LateLintPass<'tcx> for NamedAsmLabels {
     #[allow(rustc::diagnostic_outside_of_impl)]
     fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) {
         if let hir::Expr {
-            kind: hir::ExprKind::InlineAsm(hir::InlineAsm { template_strs, .. }),
+            kind: hir::ExprKind::InlineAsm(hir::InlineAsm { template_strs, options, .. }),
             ..
         } = expr
         {
+            // asm with `options(raw)` does not do replacement with `{` and `}`.
+            let raw = options.contains(InlineAsmOptions::RAW);
+
             for (template_sym, template_snippet, template_span) in template_strs.iter() {
                 let template_str = template_sym.as_str();
                 let find_label_span = |needle: &str| -> Option<Span> {
@@ -2763,24 +2766,57 @@ impl<'tcx> LateLintPass<'tcx> for NamedAsmLabels {
                 for statement in statements {
                     // If there's a comment, trim it from the statement
                     let statement = statement.find("//").map_or(statement, |idx| &statement[..idx]);
 
+                    // In this loop, if there is ever a non-label, no labels can come after it.
                     let mut start_idx = 0;
-                    for (idx, _) in statement.match_indices(':') {
+                    'label_loop: for (idx, _) in statement.match_indices(':') {
                         let possible_label = statement[start_idx..idx].trim();
                         let mut chars = possible_label.chars();
-                        let Some(c) = chars.next() else {
-                            // Empty string means a leading ':' in this section, which is not a label
-                            break;
+
+                        let Some(start) = chars.next() else {
+                            // Empty string means a leading ':' in this section, which is not a label.
+                            break 'label_loop;
                         };
-                        // A label starts with an alphabetic character or . or _ and continues with alphanumeric characters, _, or $
-                        if (c.is_alphabetic() || matches!(c, '.' | '_'))
-                            && chars.all(|c| c.is_alphanumeric() || matches!(c, '_' | '$'))
-                        {
-                            found_labels.push(possible_label);
-                        } else {
-                            // If we encounter a non-label, there cannot be any further labels, so stop checking
-                            break;
+
+                        // Whether a { bracket has been seen and its } hasn't been found yet.
+                        let mut in_bracket = false;
+
+                        // A label starts with an ASCII alphabetic character or . or _
+                        // A label can also start with a format arg, if it's not a raw asm block.
+                        if !raw && start == '{' {
+                            in_bracket = true;
+                        } else if !(start.is_ascii_alphabetic() || matches!(start, '.' | '_')) {
+                            break 'label_loop;
                         }
 
+                        // Labels continue with ASCII alphanumeric characters, _, or $
+                        for c in chars {
+                            // Inside a template format arg, any character is permitted for the puproses of label detection
+                            // because we assume that it can be replaced with some other valid label string later.
+                            // `options(raw)` asm blocks cannot have format args, so they are excluded from this special case.
+                            if !raw && in_bracket {
+                                if c == '{' {
+                                    // Nested brackets are not allowed in format args, this cannot be a label.
+                                    break 'label_loop;
+                                }
+
+                                if c == '}' {
+                                    // The end of the format arg.
+                                    in_bracket = false;
+                                }
+                            } else if !raw && c == '{' {
+                                // Start of a format arg.
+                                in_bracket = true;
+                            } else {
+                                if !(c.is_ascii_alphanumeric() || matches!(c, '_' | '$')) {
+                                    // The potential label had an invalid character inside it, it cannot be a label.
+                                    break 'label_loop;
+                                }
+                            }
+                        }
+
+                        // If all characters passed the label checks, this is likely a label.
+                        found_labels.push(possible_label);
                         start_idx = idx + 1;
                     }
                 }
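To make the format-arg handling above concrete, an assumed x86-64 example (not from the commit's test suite): a `{}` placeholder in label position is now treated as a potential label, while `options(raw)` templates keep treating `{` as literal text.

```rust
// Illustrative, x86-64 only, and intentionally rejected: `named_asm_labels` is
// deny-by-default, and after this change it should also fire when a format
// argument sits in label position, since the substituted text could form a
// named label.
use std::arch::asm;

fn main() {
    let x: u64 = 0;
    unsafe {
        asm!("{0}: nop", in(reg) x);
    }
}
```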
@@ -771,6 +771,9 @@ parse_unexpected_if_with_if = unexpected `if` in the condition expression
 parse_unexpected_lifetime_in_pattern = unexpected lifetime `{$symbol}` in pattern
     .suggestion = remove the lifetime
 
+parse_unexpected_paren_in_range_pat = range pattern bounds cannot have parentheses
+parse_unexpected_paren_in_range_pat_sugg = remove these parentheses
+
 parse_unexpected_parentheses_in_for_head = unexpected parentheses surrounding `for` loop head
     .suggestion = remove parentheses in `for` loop
 
@@ -2378,6 +2378,27 @@ pub(crate) struct ExpectedCommaAfterPatternField {
     pub span: Span,
 }
 
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_paren_in_range_pat)]
+pub(crate) struct UnexpectedParenInRangePat {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: UnexpectedParenInRangePatSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_unexpected_paren_in_range_pat_sugg,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct UnexpectedParenInRangePatSugg {
+    #[suggestion_part(code = "")]
+    pub start_span: Span,
+    #[suggestion_part(code = "")]
+    pub end_span: Span,
+}
+
 #[derive(Diagnostic)]
 #[diag(parse_return_types_use_thin_arrow)]
 pub(crate) struct ReturnTypesUseThinArrow {
@@ -6,7 +6,8 @@ use crate::errors::{
     InclusiveRangeExtraEquals, InclusiveRangeMatchArrow, InclusiveRangeNoEnd, InvalidMutInPattern,
     PatternOnWrongSideOfAt, RefMutOrderIncorrect, RemoveLet, RepeatedMutInPattern,
     SwitchRefBoxOrder, TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg,
-    TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedVertVertBeforeFunctionParam,
+    TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedParenInRangePat,
+    UnexpectedParenInRangePatSugg, UnexpectedVertVertBeforeFunctionParam,
     UnexpectedVertVertInPattern,
 };
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
@@ -579,6 +580,8 @@ impl<'a> Parser<'a> {
 
     /// Parse a tuple or parenthesis pattern.
     fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> {
+        let open_paren = self.token.span;
+
         let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| {
             p.parse_pat_allow_top_alt(
                 None,
@@ -591,7 +594,29 @@ impl<'a> Parser<'a> {
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
         Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
-            PatKind::Paren(fields.into_iter().next().unwrap())
+            let pat = fields.into_iter().next().unwrap();
+            let close_paren = self.prev_token.span;
+
+            match &pat.kind {
+                // recover ranges with parentheses around the `(start)..`
+                PatKind::Lit(begin)
+                    if self.may_recover()
+                        && let Some(form) = self.parse_range_end() =>
+                {
+                    self.dcx().emit_err(UnexpectedParenInRangePat {
+                        span: vec![open_paren, close_paren],
+                        sugg: UnexpectedParenInRangePatSugg {
+                            start_span: open_paren,
+                            end_span: close_paren,
+                        },
+                    });
+
+                    self.parse_pat_range_begin_with(begin.clone(), form)?
+                }
+
+                // (pat) with optional parentheses
+                _ => PatKind::Paren(pat),
+            }
         } else {
             PatKind::Tuple(fields)
         })
@ -794,11 +819,21 @@ impl<'a> Parser<'a> {
|
||||||
|| t.can_begin_literal_maybe_minus() // e.g. `42`.
|
|| t.can_begin_literal_maybe_minus() // e.g. `42`.
|
||||||
|| t.is_whole_expr()
|
|| t.is_whole_expr()
|
||||||
|| t.is_lifetime() // recover `'a` instead of `'a'`
|
|| t.is_lifetime() // recover `'a` instead of `'a'`
|
||||||
|
|| (self.may_recover() // recover leading `(`
|
||||||
|
&& t.kind == token::OpenDelim(Delimiter::Parenthesis)
|
||||||
|
&& self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis))
|
||||||
|
&& self.is_pat_range_end_start(dist + 1))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Parse a range pattern end bound
|
||||||
fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
|
fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
|
||||||
if self.check_inline_const(0) {
|
// recover leading `(`
|
||||||
|
let open_paren = (self.may_recover()
|
||||||
|
&& self.eat_noexpect(&token::OpenDelim(Delimiter::Parenthesis)))
|
||||||
|
.then_some(self.prev_token.span);
|
||||||
|
|
||||||
|
let bound = if self.check_inline_const(0) {
|
||||||
self.parse_const_block(self.token.span, true)
|
self.parse_const_block(self.token.span, true)
|
||||||
} else if self.check_path() {
|
} else if self.check_path() {
|
||||||
let lo = self.token.span;
|
let lo = self.token.span;
|
||||||
|
@ -814,7 +849,22 @@ impl<'a> Parser<'a> {
|
||||||
Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path)))
|
Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path)))
|
||||||
} else {
|
} else {
|
||||||
self.parse_literal_maybe_minus()
|
self.parse_literal_maybe_minus()
|
||||||
|
}?;
|
||||||
|
|
||||||
|
// recover trailing `)`
|
||||||
|
if let Some(open_paren) = open_paren {
|
||||||
|
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
|
||||||
|
|
||||||
|
self.dcx().emit_err(UnexpectedParenInRangePat {
|
||||||
|
span: vec![open_paren, self.prev_token.span],
|
||||||
|
sugg: UnexpectedParenInRangePatSugg {
|
||||||
|
start_span: open_paren,
|
||||||
|
end_span: self.prev_token.span,
|
||||||
|
},
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Ok(bound)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Is this the start of a pattern beginning with a path?
|
/// Is this the start of a pattern beginning with a path?
|
||||||
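Taken together, the two recovery paths above turn parentheses around a range-pattern bound from a hard parse failure into a targeted error with a machine-applicable fix. A minimal illustration of the surface behaviour (a trimmed-down variant of tests/ui/parser/pat-recover-ranges.rs added later in this diff; the extra `_` arm is only there to keep the sketch self-contained):

fn main() {
    match -1 {
        0..=(1) => (),   // now: error: range pattern bounds cannot have parentheses
        (-12)..=4 => (), // now: error: range pattern bounds cannot have parentheses
        _ => (),
    };
}

Each error carries an UnexpectedParenInRangePatSugg whose two empty suggestion parts delete the opening and closing parenthesis, so the fix can be applied automatically.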
|
|
|
@ -1274,7 +1274,10 @@ fn validate_commandline_args_with_session_available(sess: &Session) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Cannot enable crt-static with sanitizers on Linux
|
// Cannot enable crt-static with sanitizers on Linux
|
||||||
if sess.crt_static(None) && !sess.opts.unstable_opts.sanitizer.is_empty() {
|
if sess.crt_static(None)
|
||||||
|
&& !sess.opts.unstable_opts.sanitizer.is_empty()
|
||||||
|
&& !sess.target.is_like_msvc
|
||||||
|
{
|
||||||
sess.dcx().emit_err(errors::CannotEnableCrtStaticLinux);
|
sess.dcx().emit_err(errors::CannotEnableCrtStaticLinux);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
use crate::spec::{base, LinkerFlavor, Lld, Target};
|
use crate::spec::{base, LinkerFlavor, Lld, SanitizerSet, Target};
|
||||||
|
|
||||||
pub fn target() -> Target {
|
pub fn target() -> Target {
|
||||||
let mut base = base::windows_msvc::opts();
|
let mut base = base::windows_msvc::opts();
|
||||||
base.cpu = "pentium4".into();
|
base.cpu = "pentium4".into();
|
||||||
base.max_atomic_width = Some(64);
|
base.max_atomic_width = Some(64);
|
||||||
|
base.supported_sanitizers = SanitizerSet::ADDRESS;
|
||||||
|
|
||||||
base.add_pre_link_args(
|
base.add_pre_link_args(
|
||||||
LinkerFlavor::Msvc(Lld::No),
|
LinkerFlavor::Msvc(Lld::No),
|
||||||
|
|
|
@ -1,10 +1,11 @@
|
||||||
use crate::spec::{base, Target};
|
use crate::spec::{base, SanitizerSet, Target};
|
||||||
|
|
||||||
pub fn target() -> Target {
|
pub fn target() -> Target {
|
||||||
let mut base = base::windows_msvc::opts();
|
let mut base = base::windows_msvc::opts();
|
||||||
base.cpu = "x86-64".into();
|
base.cpu = "x86-64".into();
|
||||||
base.plt_by_default = false;
|
base.plt_by_default = false;
|
||||||
base.max_atomic_width = Some(64);
|
base.max_atomic_width = Some(64);
|
||||||
|
base.supported_sanitizers = SanitizerSet::ADDRESS;
|
||||||
|
|
||||||
Target {
|
Target {
|
||||||
llvm_target: "x86_64-pc-windows-msvc".into(),
|
llvm_target: "x86_64-pc-windows-msvc".into(),
|
||||||
|
|
|
@ -73,8 +73,18 @@ impl UnixListener {
|
||||||
unsafe {
|
unsafe {
|
||||||
let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?;
|
let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?;
|
||||||
let (addr, len) = sockaddr_un(path.as_ref())?;
|
let (addr, len) = sockaddr_un(path.as_ref())?;
|
||||||
const backlog: libc::c_int =
|
#[cfg(any(target_os = "windows", target_os = "redox"))]
|
||||||
if cfg!(any(target_os = "linux", target_os = "freebsd")) { -1 } else { 128 };
|
const backlog: libc::c_int = 128;
|
||||||
|
#[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "openbsd"))]
|
||||||
|
const backlog: libc::c_int = -1;
|
||||||
|
#[cfg(not(any(
|
||||||
|
target_os = "windows",
|
||||||
|
target_os = "redox",
|
||||||
|
target_os = "linux",
|
||||||
|
target_os = "freebsd",
|
||||||
|
target_os = "openbsd"
|
||||||
|
)))]
|
||||||
|
const backlog: libc::c_int = libc::SOMAXCONN;
|
||||||
|
|
||||||
cvt(libc::bind(inner.as_inner().as_raw_fd(), &addr as *const _ as *const _, len as _))?;
|
cvt(libc::bind(inner.as_inner().as_raw_fd(), &addr as *const _ as *const _, len as _))?;
|
||||||
cvt(libc::listen(inner.as_inner().as_raw_fd(), backlog))?;
|
cvt(libc::listen(inner.as_inner().as_raw_fd(), backlog))?;
|
||||||
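For background on the constants above: on Linux, FreeBSD, and OpenBSD the kernel clamps a negative listen(2) backlog to the system maximum, so -1 effectively requests the largest queue the OS allows, while other platforms get an explicit value. A standalone sketch of the same selection outside std (the helper name and raw `fd` parameter are illustrative, not part of this change; the windows/redox special case is omitted for brevity; requires the libc crate):

use std::os::fd::RawFd;

// Pick the listen(2) backlog the same way as above: -1 where the kernel
// clamps it to the maximum, SOMAXCONN elsewhere.
fn listen_with_default_backlog(fd: RawFd) -> std::io::Result<()> {
    #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "openbsd"))]
    const BACKLOG: libc::c_int = -1;
    #[cfg(not(any(target_os = "linux", target_os = "freebsd", target_os = "openbsd")))]
    const BACKLOG: libc::c_int = libc::SOMAXCONN;

    if unsafe { libc::listen(fd, BACKLOG) } == 0 {
        Ok(())
    } else {
        Err(std::io::Error::last_os_error())
    }
}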
|
|
|
@ -274,7 +274,7 @@ fn copy_third_party_objects(
|
||||||
) -> Vec<(PathBuf, DependencyType)> {
|
) -> Vec<(PathBuf, DependencyType)> {
|
||||||
let mut target_deps = vec![];
|
let mut target_deps = vec![];
|
||||||
|
|
||||||
if builder.config.sanitizers_enabled(target) && compiler.stage != 0 {
|
if builder.config.needs_sanitizer_runtime_built(target) && compiler.stage != 0 {
|
||||||
// The sanitizers are only copied in stage1 or above,
|
// The sanitizers are only copied in stage1 or above,
|
||||||
// to avoid creating dependency on LLVM.
|
// to avoid creating dependency on LLVM.
|
||||||
target_deps.extend(
|
target_deps.extend(
|
||||||
|
|
|
@ -1937,6 +1937,29 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Special setup to enable running with sanitizers on MSVC.
|
||||||
|
if !builder.config.dry_run()
|
||||||
|
&& target.contains("msvc")
|
||||||
|
&& builder.config.sanitizers_enabled(target)
|
||||||
|
{
|
||||||
|
// Ignore interception failures: not all dlls in the process will have been built with
|
||||||
|
// address sanitizer enabled (e.g., ntdll.dll).
|
||||||
|
cmd.env("ASAN_WIN_CONTINUE_ON_INTERCEPTION_FAILURE", "1");
|
||||||
|
// Add the address sanitizer runtime to the PATH - it is located next to cl.exe.
|
||||||
|
let asan_runtime_path =
|
||||||
|
builder.cc.borrow()[&target].path().parent().unwrap().to_path_buf();
|
||||||
|
let old_path = cmd
|
||||||
|
.get_envs()
|
||||||
|
.find_map(|(k, v)| (k == "PATH").then_some(v))
|
||||||
|
.flatten()
|
||||||
|
.map_or_else(|| env::var_os("PATH").unwrap_or_default(), |v| v.to_owned());
|
||||||
|
let new_path = env::join_paths(
|
||||||
|
env::split_paths(&old_path).chain(std::iter::once(asan_runtime_path)),
|
||||||
|
)
|
||||||
|
.expect("Could not add ASAN runtime path to PATH");
|
||||||
|
cmd.env("PATH", new_path);
|
||||||
|
}
|
||||||
|
|
||||||
// Some UI tests trigger behavior in rustc where it reads $CARGO and changes behavior if it exists.
|
// Some UI tests trigger behavior in rustc where it reads $CARGO and changes behavior if it exists.
|
||||||
// To make the tests work that rely on it not being set, make sure it is not set.
|
// To make the tests work that rely on it not being set, make sure it is not set.
|
||||||
cmd.env_remove("CARGO");
|
cmd.env_remove("CARGO");
|
||||||
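The PATH handling above uses the standard std::env::split_paths/join_paths round-trip to extend the search path seen by a child process. A generic standalone sketch of the same pattern (the helper name and arguments are illustrative, not part of this change):

use std::env;
use std::path::PathBuf;
use std::process::Command;

// Append `extra_dir` to the PATH a child process will see, preserving the
// existing entries.
fn command_with_extra_path(program: &str, extra_dir: PathBuf) -> Command {
    let old_path = env::var_os("PATH").unwrap_or_default();
    let new_path = env::join_paths(env::split_paths(&old_path).chain(std::iter::once(extra_dir)))
        .expect("could not rebuild PATH");
    let mut cmd = Command::new(program);
    cmd.env("PATH", new_path);
    cmd
}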
|
|
|
@ -2180,8 +2180,15 @@ impl Config {
|
||||||
self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers)
|
self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn any_sanitizers_enabled(&self) -> bool {
|
pub fn needs_sanitizer_runtime_built(&self, target: TargetSelection) -> bool {
|
||||||
self.target_config.values().any(|t| t.sanitizers == Some(true)) || self.sanitizers
|
// MSVC uses the Microsoft-provided sanitizer runtime, but all other runtimes we build.
|
||||||
|
!target.is_msvc() && self.sanitizers_enabled(target)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn any_sanitizers_to_build(&self) -> bool {
|
||||||
|
self.target_config
|
||||||
|
.iter()
|
||||||
|
.any(|(ts, t)| !ts.is_msvc() && t.sanitizers.unwrap_or(self.sanitizers))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn profiler_path(&self, target: TargetSelection) -> Option<&str> {
|
pub fn profiler_path(&self, target: TargetSelection) -> Option<&str> {
|
||||||
|
|
|
@ -96,7 +96,7 @@ pub fn check(build: &mut Build) {
|
||||||
})
|
})
|
||||||
.any(|build_llvm_ourselves| build_llvm_ourselves);
|
.any(|build_llvm_ourselves| build_llvm_ourselves);
|
||||||
|
|
||||||
let need_cmake = building_llvm || build.config.any_sanitizers_enabled();
|
let need_cmake = building_llvm || build.config.any_sanitizers_to_build();
|
||||||
if need_cmake && cmd_finder.maybe_have("cmake").is_none() {
|
if need_cmake && cmd_finder.maybe_have("cmake").is_none() {
|
||||||
eprintln!(
|
eprintln!(
|
||||||
"
|
"
|
||||||
|
|
|
@ -244,7 +244,7 @@ fn find_resolution(resolutions: &DocLinkResMap, path: &str) -> Option<Res<NodeId
|
||||||
.find_map(|ns| resolutions.get(&(Symbol::intern(path), ns)).copied().flatten())
|
.find_map(|ns| resolutions.get(&(Symbol::intern(path), ns)).copied().flatten())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Collects all neccessary data of link.
|
/// Collects all necessary data of link.
|
||||||
fn collect_link_data(offset_iter: &mut OffsetIter<'_, '_>) -> LinkData {
|
fn collect_link_data(offset_iter: &mut OffsetIter<'_, '_>) -> LinkData {
|
||||||
let mut resolvable_link = None;
|
let mut resolvable_link = None;
|
||||||
let mut resolvable_link_range = None;
|
let mut resolvable_link_range = None;
|
||||||
|
|
33
tests/codegen/maybeuninit-rvo.rs
Normal file
|
@ -0,0 +1,33 @@
|
||||||
|
// compile-flags: -O
|
||||||
|
#![feature(c_unwind)]
|
||||||
|
#![crate_type = "lib"]
|
||||||
|
|
||||||
|
pub struct Foo([u8; 1000]);
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
fn init(p: *mut Foo);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_from_uninit() -> Foo {
|
||||||
|
// CHECK-LABEL: new_from_uninit
|
||||||
|
// CHECK-NOT: call void @llvm.memcpy.
|
||||||
|
let mut x = std::mem::MaybeUninit::uninit();
|
||||||
|
unsafe {
|
||||||
|
init(x.as_mut_ptr());
|
||||||
|
x.assume_init()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
extern "C-unwind" {
|
||||||
|
fn init_unwind(p: *mut Foo);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_from_uninit_unwind() -> Foo {
|
||||||
|
// CHECK-LABEL: new_from_uninit
|
||||||
|
// CHECK: call void @llvm.memcpy.
|
||||||
|
let mut x = std::mem::MaybeUninit::uninit();
|
||||||
|
unsafe {
|
||||||
|
init_unwind(x.as_mut_ptr());
|
||||||
|
x.assume_init()
|
||||||
|
}
|
||||||
|
}
|
|
@ -1,15 +1,15 @@
|
||||||
Function name: macro_name_span::affected_function
|
Function name: macro_name_span::affected_function
|
||||||
Raw bytes (9): 0x[01, 01, 00, 01, 01, 06, 1b, 00, 20]
|
Raw bytes (9): 0x[01, 01, 00, 01, 01, 16, 1c, 02, 06]
|
||||||
Number of files: 1
|
Number of files: 1
|
||||||
- file 0 => global file 1
|
- file 0 => global file 1
|
||||||
Number of expressions: 0
|
Number of expressions: 0
|
||||||
Number of file 0 mappings: 1
|
Number of file 0 mappings: 1
|
||||||
- Code(Counter(0)) at (prev + 6, 27) to (start + 0, 32)
|
- Code(Counter(0)) at (prev + 22, 28) to (start + 2, 6)
|
||||||
|
|
||||||
Function name: macro_name_span::main
|
Function name: macro_name_span::main
|
||||||
Raw bytes (9): 0x[01, 02, 00, 01, 01, 0b, 01, 02, 02]
|
Raw bytes (9): 0x[01, 01, 00, 01, 01, 0b, 01, 02, 02]
|
||||||
Number of files: 1
|
Number of files: 1
|
||||||
- file 0 => global file 2
|
- file 0 => global file 1
|
||||||
Number of expressions: 0
|
Number of expressions: 0
|
||||||
Number of file 0 mappings: 1
|
Number of file 0 mappings: 1
|
||||||
- Code(Counter(0)) at (prev + 11, 1) to (start + 2, 2)
|
- Code(Counter(0)) at (prev + 11, 1) to (start + 2, 2)
|
||||||
|
|
|
@ -1,16 +1,3 @@
|
||||||
$DIR/auxiliary/macro_name_span_helper.rs:
|
|
||||||
LL| |// edition: 2021
|
|
||||||
LL| |
|
|
||||||
LL| |#[macro_export]
|
|
||||||
LL| |macro_rules! macro_that_defines_a_function {
|
|
||||||
LL| | (fn $name:ident () $body:tt) => {
|
|
||||||
LL| 1| fn $name () -> () $body
|
|
||||||
LL| | }
|
|
||||||
LL| |}
|
|
||||||
LL| |
|
|
||||||
LL| |// Non-executable comment.
|
|
||||||
|
|
||||||
$DIR/macro_name_span.rs:
|
|
||||||
LL| |// edition: 2021
|
LL| |// edition: 2021
|
||||||
LL| |
|
LL| |
|
||||||
LL| |// Regression test for <https://github.com/rust-lang/rust/issues/117788>.
|
LL| |// Regression test for <https://github.com/rust-lang/rust/issues/117788>.
|
||||||
|
@ -32,8 +19,8 @@ $DIR/macro_name_span.rs:
|
||||||
LL| |}
|
LL| |}
|
||||||
LL| |
|
LL| |
|
||||||
LL| |macro_name_span_helper::macro_that_defines_a_function! {
|
LL| |macro_name_span_helper::macro_that_defines_a_function! {
|
||||||
LL| | fn affected_function() {
|
LL| 1| fn affected_function() {
|
||||||
LL| | macro_with_an_unreasonably_and_egregiously_long_name!();
|
LL| 1| macro_with_an_unreasonably_and_egregiously_long_name!();
|
||||||
LL| | }
|
LL| 1| }
|
||||||
LL| |}
|
LL| |}
|
||||||
|
|
||||||
|
|
|
@ -12,5 +12,5 @@ LOG := $(TMPDIR)/log.txt
|
||||||
|
|
||||||
all:
|
all:
|
||||||
$(RUSTC) -g -Z sanitizer=address --crate-type cdylib --target $(TARGET) library.rs
|
$(RUSTC) -g -Z sanitizer=address --crate-type cdylib --target $(TARGET) library.rs
|
||||||
$(RUSTC) -g -Z sanitizer=address --crate-type bin --target $(TARGET) -llibrary program.rs
|
$(RUSTC) -g -Z sanitizer=address --crate-type bin --target $(TARGET) program.rs
|
||||||
LD_LIBRARY_PATH=$(TMPDIR) $(TMPDIR)/program 2>&1 | $(CGREP) stack-buffer-overflow
|
LD_LIBRARY_PATH=$(TMPDIR) $(TMPDIR)/program 2>&1 | $(CGREP) stack-buffer-overflow
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
#[cfg_attr(windows, link(name = "library.dll.lib", modifiers = "+verbatim"))]
|
||||||
|
#[cfg_attr(not(windows), link(name = "library"))]
|
||||||
extern "C" {
|
extern "C" {
|
||||||
fn overflow();
|
fn overflow();
|
||||||
}
|
}
|
||||||
|
|
|
@ -12,5 +12,5 @@ LOG := $(TMPDIR)/log.txt
|
||||||
|
|
||||||
all:
|
all:
|
||||||
$(RUSTC) -g -Z sanitizer=address --crate-type dylib --target $(TARGET) library.rs
|
$(RUSTC) -g -Z sanitizer=address --crate-type dylib --target $(TARGET) library.rs
|
||||||
$(RUSTC) -g -Z sanitizer=address --crate-type bin --target $(TARGET) -llibrary program.rs
|
$(RUSTC) -g -Z sanitizer=address --crate-type bin --target $(TARGET) program.rs
|
||||||
LD_LIBRARY_PATH=$(TMPDIR) $(TMPDIR)/program 2>&1 | $(CGREP) stack-buffer-overflow
|
LD_LIBRARY_PATH=$(TMPDIR) $(TMPDIR)/program 2>&1 | $(CGREP) stack-buffer-overflow
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
#[cfg_attr(windows, link(name = "library.dll.lib", modifiers = "+verbatim"))]
|
||||||
|
#[cfg_attr(not(windows), link(name = "library"))]
|
||||||
extern "C" {
|
extern "C" {
|
||||||
fn overflow();
|
fn overflow();
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#[link(name = "library")]
|
#[link(name = "library", kind = "static")]
|
||||||
extern "C" {
|
extern "C" {
|
||||||
fn overflow();
|
fn overflow();
|
||||||
}
|
}
|
||||||
|
|
|
@ -120,6 +120,27 @@ fn main() {
|
||||||
// is there an example that is valid x86 for this test?
|
// is there an example that is valid x86 for this test?
|
||||||
asm!(":bbb nop");
|
asm!(":bbb nop");
|
||||||
|
|
||||||
|
// non-ascii characters are not allowed in labels, so should not trigger the lint
|
||||||
|
asm!("Ù: nop");
|
||||||
|
asm!("testÙ: nop");
|
||||||
|
asm!("_Ù_: nop");
|
||||||
|
|
||||||
|
// Format arguments should be conservatively assumed to be valid characters in labels
|
||||||
|
// Would emit `test_rax:` or similar
|
||||||
|
#[allow(asm_sub_register)]
|
||||||
|
{
|
||||||
|
asm!("test_{}: nop", in(reg) 10); //~ ERROR avoid using named labels
|
||||||
|
}
|
||||||
|
asm!("test_{}: nop", const 10); //~ ERROR avoid using named labels
|
||||||
|
asm!("test_{}: nop", sym main); //~ ERROR avoid using named labels
|
||||||
|
asm!("{}_test: nop", const 10); //~ ERROR avoid using named labels
|
||||||
|
asm!("test_{}_test: nop", const 10); //~ ERROR avoid using named labels
|
||||||
|
asm!("{}: nop", const 10); //~ ERROR avoid using named labels
|
||||||
|
|
||||||
|
asm!("{uwu}: nop", uwu = const 10); //~ ERROR avoid using named labels
|
||||||
|
asm!("{0}: nop", const 10); //~ ERROR avoid using named labels
|
||||||
|
asm!("{1}: nop", "/* {0} */", const 10, const 20); //~ ERROR avoid using named labels
|
||||||
|
|
||||||
// Test include_str in asm
|
// Test include_str in asm
|
||||||
asm!(include_str!("named-asm-labels.s")); //~ ERROR avoid using named labels
|
asm!(include_str!("named-asm-labels.s")); //~ ERROR avoid using named labels
|
||||||
|
|
||||||
|
|
|
@ -245,7 +245,88 @@ LL | ab: nop // ab: does foo
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:124:14
|
--> $DIR/named-asm-labels.rs:132:19
|
||||||
|
|
|
||||||
|
LL | asm!("test_{}: nop", in(reg) 10);
|
||||||
|
| ^^^^^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:134:15
|
||||||
|
|
|
||||||
|
LL | asm!("test_{}: nop", const 10);
|
||||||
|
| ^^^^^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:135:15
|
||||||
|
|
|
||||||
|
LL | asm!("test_{}: nop", sym main);
|
||||||
|
| ^^^^^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:136:15
|
||||||
|
|
|
||||||
|
LL | asm!("{}_test: nop", const 10);
|
||||||
|
| ^^^^^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:137:15
|
||||||
|
|
|
||||||
|
LL | asm!("test_{}_test: nop", const 10);
|
||||||
|
| ^^^^^^^^^^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:138:15
|
||||||
|
|
|
||||||
|
LL | asm!("{}: nop", const 10);
|
||||||
|
| ^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:140:15
|
||||||
|
|
|
||||||
|
LL | asm!("{uwu}: nop", uwu = const 10);
|
||||||
|
| ^^^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:141:15
|
||||||
|
|
|
||||||
|
LL | asm!("{0}: nop", const 10);
|
||||||
|
| ^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:142:15
|
||||||
|
|
|
||||||
|
LL | asm!("{1}: nop", "/* {0} */", const 10, const 20);
|
||||||
|
| ^^^
|
||||||
|
|
|
||||||
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
|
error: avoid using named labels in inline assembly
|
||||||
|
--> $DIR/named-asm-labels.rs:145:14
|
||||||
|
|
|
|
||||||
LL | asm!(include_str!("named-asm-labels.s"));
|
LL | asm!(include_str!("named-asm-labels.s"));
|
||||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
@ -254,7 +335,7 @@ LL | asm!(include_str!("named-asm-labels.s"));
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
warning: avoid using named labels in inline assembly
|
warning: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:134:19
|
--> $DIR/named-asm-labels.rs:155:19
|
||||||
|
|
|
|
||||||
LL | asm!("warned: nop");
|
LL | asm!("warned: nop");
|
||||||
| ^^^^^^
|
| ^^^^^^
|
||||||
|
@ -262,13 +343,13 @@ LL | asm!("warned: nop");
|
||||||
= help: only local labels of the form `<number>:` should be used in inline asm
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
note: the lint level is defined here
|
note: the lint level is defined here
|
||||||
--> $DIR/named-asm-labels.rs:132:16
|
--> $DIR/named-asm-labels.rs:153:16
|
||||||
|
|
|
|
||||||
LL | #[warn(named_asm_labels)]
|
LL | #[warn(named_asm_labels)]
|
||||||
| ^^^^^^^^^^^^^^^^
|
| ^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:143:20
|
--> $DIR/named-asm-labels.rs:164:20
|
||||||
|
|
|
|
||||||
LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) }
|
LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) }
|
||||||
| ^^^^^
|
| ^^^^^
|
||||||
|
@ -277,7 +358,7 @@ LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noret
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:149:20
|
--> $DIR/named-asm-labels.rs:170:20
|
||||||
|
|
|
|
||||||
LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noreturn)) }
|
LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noreturn)) }
|
||||||
| ^^^^^
|
| ^^^^^
|
||||||
|
@ -286,7 +367,7 @@ LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noret
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:157:20
|
--> $DIR/named-asm-labels.rs:178:20
|
||||||
|
|
|
|
||||||
LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) }
|
LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) }
|
||||||
| ^^^^^
|
| ^^^^^
|
||||||
|
@ -295,7 +376,7 @@ LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) }
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:167:24
|
--> $DIR/named-asm-labels.rs:188:24
|
||||||
|
|
|
|
||||||
LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) }
|
LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) }
|
||||||
| ^^^^^
|
| ^^^^^
|
||||||
|
@ -304,7 +385,7 @@ LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) }
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:176:15
|
--> $DIR/named-asm-labels.rs:197:15
|
||||||
|
|
|
|
||||||
LL | asm!("closure1: nop");
|
LL | asm!("closure1: nop");
|
||||||
| ^^^^^^^^
|
| ^^^^^^^^
|
||||||
|
@ -313,7 +394,7 @@ LL | asm!("closure1: nop");
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:180:15
|
--> $DIR/named-asm-labels.rs:201:15
|
||||||
|
|
|
|
||||||
LL | asm!("closure2: nop");
|
LL | asm!("closure2: nop");
|
||||||
| ^^^^^^^^
|
| ^^^^^^^^
|
||||||
|
@ -322,7 +403,7 @@ LL | asm!("closure2: nop");
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: avoid using named labels in inline assembly
|
error: avoid using named labels in inline assembly
|
||||||
--> $DIR/named-asm-labels.rs:190:19
|
--> $DIR/named-asm-labels.rs:211:19
|
||||||
|
|
|
|
||||||
LL | asm!("closure3: nop");
|
LL | asm!("closure3: nop");
|
||||||
| ^^^^^^^^
|
| ^^^^^^^^
|
||||||
|
@ -330,5 +411,5 @@ LL | asm!("closure3: nop");
|
||||||
= help: only local labels of the form `<number>:` should be used in inline asm
|
= help: only local labels of the form `<number>:` should be used in inline asm
|
||||||
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
= note: see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information
|
||||||
|
|
||||||
error: aborting due to 35 previous errors; 1 warning emitted
|
error: aborting due to 44 previous errors; 1 warning emitted
|
||||||
|
|
||||||
|
|
|
@ -270,11 +270,6 @@ note: an implementation of `PartialEq<&&{integer}>` might be missing for `Foo`
|
||||||
|
|
|
|
||||||
LL | struct Foo;
|
LL | struct Foo;
|
||||||
| ^^^^^^^^^^ must implement `PartialEq<&&{integer}>`
|
| ^^^^^^^^^^ must implement `PartialEq<&&{integer}>`
|
||||||
help: consider annotating `Foo` with `#[derive(PartialEq)]`
|
|
||||||
|
|
|
||||||
LL + #[derive(PartialEq)]
|
|
||||||
LL | struct Foo;
|
|
||||||
|
|
|
||||||
|
|
||||||
error[E0277]: can't compare `&String` with `str`
|
error[E0277]: can't compare `&String` with `str`
|
||||||
--> $DIR/binary-op-suggest-deref.rs:69:20
|
--> $DIR/binary-op-suggest-deref.rs:69:20
|
||||||
|
|
|
@ -8,10 +8,10 @@ LL | let _ = || yield true;
|
||||||
= help: add `#![feature(coroutines)]` to the crate attributes to enable
|
= help: add `#![feature(coroutines)]` to the crate attributes to enable
|
||||||
|
|
||||||
error[E0282]: type annotations needed
|
error[E0282]: type annotations needed
|
||||||
--> $DIR/gen_block.rs:6:17
|
--> $DIR/gen_block.rs:6:13
|
||||||
|
|
|
|
||||||
LL | let x = gen {};
|
LL | let x = gen {};
|
||||||
| ^^ cannot infer type
|
| ^^^^^^ cannot infer type
|
||||||
|
|
||||||
error: aborting due to 2 previous errors
|
error: aborting due to 2 previous errors
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
error[E0277]: the size for values of type `str` cannot be known at compilation time
|
error[E0277]: the size for values of type `str` cannot be known at compilation time
|
||||||
--> $DIR/sized-yield.rs:8:27
|
--> $DIR/sized-yield.rs:8:19
|
||||||
|
|
|
|
||||||
LL | let mut gen = move || {
|
LL | let mut gen = move || {
|
||||||
| ___________________________^
|
| ___________________^
|
||||||
LL | |
|
LL | |
|
||||||
LL | | yield s[..];
|
LL | | yield s[..];
|
||||||
LL | | };
|
LL | | };
|
||||||
|
|
|
@ -35,16 +35,16 @@ LL | async gen {};
|
||||||
= help: add `#![feature(gen_blocks)]` to the crate attributes to enable
|
= help: add `#![feature(gen_blocks)]` to the crate attributes to enable
|
||||||
|
|
||||||
error[E0282]: type annotations needed
|
error[E0282]: type annotations needed
|
||||||
--> $DIR/feature-gate-gen_blocks.rs:5:9
|
--> $DIR/feature-gate-gen_blocks.rs:5:5
|
||||||
|
|
|
|
||||||
LL | gen {};
|
LL | gen {};
|
||||||
| ^^ cannot infer type
|
| ^^^^^^ cannot infer type
|
||||||
|
|
||||||
error[E0282]: type annotations needed
|
error[E0282]: type annotations needed
|
||||||
--> $DIR/feature-gate-gen_blocks.rs:12:15
|
--> $DIR/feature-gate-gen_blocks.rs:12:5
|
||||||
|
|
|
|
||||||
LL | async gen {};
|
LL | async gen {};
|
||||||
| ^^ cannot infer type
|
| ^^^^^^^^^^^^ cannot infer type
|
||||||
|
|
||||||
error: aborting due to 6 previous errors
|
error: aborting due to 6 previous errors
|
||||||
|
|
||||||
|
|
|
@ -8,8 +8,7 @@ fn main() {
|
||||||
for x in -9 + 1..=(9 - 2) {
|
for x in -9 + 1..=(9 - 2) {
|
||||||
match x as i32 {
|
match x as i32 {
|
||||||
0..=(5+1) => errors_only.push(x),
|
0..=(5+1) => errors_only.push(x),
|
||||||
//~^ error: inclusive range with no end
|
//~^ error: expected `)`, found `+`
|
||||||
//~| error: expected one of `=>`, `if`, or `|`, found `(`
|
|
||||||
1 | -3..0 => first_or.push(x),
|
1 | -3..0 => first_or.push(x),
|
||||||
y @ (0..5 | 6) => or_two.push(y),
|
y @ (0..5 | 6) => or_two.push(y),
|
||||||
y @ 0..const { 5 + 1 } => assert_eq!(y, 5),
|
y @ 0..const { 5 + 1 } => assert_eq!(y, 5),
|
||||||
|
|
|
@ -1,17 +1,8 @@
|
||||||
error[E0586]: inclusive range with no end
|
error: expected `)`, found `+`
|
||||||
--> $DIR/range_pat_interactions2.rs:10:14
|
--> $DIR/range_pat_interactions2.rs:10:19
|
||||||
|
|
|
|
||||||
LL | 0..=(5+1) => errors_only.push(x),
|
LL | 0..=(5+1) => errors_only.push(x),
|
||||||
| ^^^ help: use `..` instead
|
| ^ expected `)`
|
||||||
|
|
|
||||||
= note: inclusive ranges must be bounded at the end (`..=b` or `a..=b`)
|
|
||||||
|
|
||||||
error: expected one of `=>`, `if`, or `|`, found `(`
|
error: aborting due to 1 previous error
|
||||||
--> $DIR/range_pat_interactions2.rs:10:17
|
|
||||||
|
|
|
||||||
LL | 0..=(5+1) => errors_only.push(x),
|
|
||||||
| ^ expected one of `=>`, `if`, or `|`
|
|
||||||
|
|
||||||
error: aborting due to 2 previous errors
|
|
||||||
|
|
||||||
For more information about this error, try `rustc --explain E0586`.
|
|
||||||
|
|
|
@ -11,11 +11,6 @@ note: an implementation of `PartialEq<fn(()) -> A {A::Value}>` might be missing
|
||||||
|
|
|
|
||||||
LL | enum A {
|
LL | enum A {
|
||||||
| ^^^^^^ must implement `PartialEq<fn(()) -> A {A::Value}>`
|
| ^^^^^^ must implement `PartialEq<fn(()) -> A {A::Value}>`
|
||||||
help: consider annotating `A` with `#[derive(PartialEq)]`
|
|
||||||
|
|
|
||||||
LL + #[derive(PartialEq)]
|
|
||||||
LL | enum A {
|
|
||||||
|
|
|
||||||
help: use parentheses to construct this tuple variant
|
help: use parentheses to construct this tuple variant
|
||||||
|
|
|
|
||||||
LL | a == A::Value(/* () */);
|
LL | a == A::Value(/* () */);
|
||||||
|
|
|
@ -6,8 +6,8 @@ LL | macro_rules! $macro_name {
|
||||||
|
|
|
|
||||||
help: change the delimiters to curly braces
|
help: change the delimiters to curly braces
|
||||||
|
|
|
|
||||||
LL | macro_rules! {} {
|
LL | macro_rules! {$macro_name} {
|
||||||
| ~ +
|
| + +
|
||||||
help: add a semicolon
|
help: add a semicolon
|
||||||
|
|
|
|
||||||
LL | macro_rules! $macro_name; {
|
LL | macro_rules! $macro_name; {
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
error: missing condition for `if` expression
|
error: missing condition for `if` expression
|
||||||
--> $DIR/issue-68091-unicode-ident-after-if.rs:3:14
|
--> $DIR/issue-68091-unicode-ident-after-if.rs:3:13
|
||||||
|
|
|
|
||||||
LL | $($c)ö* {}
|
LL | $($c)ö* {}
|
||||||
| ^ - if this block is the condition of the `if` expression, then it must be followed by another block
|
| ^ - if this block is the condition of the `if` expression, then it must be followed by another block
|
||||||
| |
|
| |
|
||||||
| expected condition here
|
| expected condition here
|
||||||
|
|
||||||
error: aborting due to 1 previous error
|
error: aborting due to 1 previous error
|
||||||
|
|
||||||
|
|
|
@ -1,8 +1,8 @@
|
||||||
error: macro expansion ends with an incomplete expression: expected expression
|
error: macro expansion ends with an incomplete expression: expected expression
|
||||||
--> $DIR/issue-68092-unicode-ident-after-incomplete-expr.rs:3:14
|
--> $DIR/issue-68092-unicode-ident-after-incomplete-expr.rs:3:13
|
||||||
|
|
|
|
||||||
LL | $($c)ö*
|
LL | $($c)ö*
|
||||||
| ^ expected expression
|
| ^ expected expression
|
||||||
|
|
||||||
error: aborting due to 1 previous error
|
error: aborting due to 1 previous error
|
||||||
|
|
||||||
|
|
19
tests/ui/parser/pat-recover-ranges.rs
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
fn main() {
|
||||||
|
match -1 {
|
||||||
|
0..=1 => (),
|
||||||
|
0..=(1) => (),
|
||||||
|
//~^ error: range pattern bounds cannot have parentheses
|
||||||
|
(-12)..=4 => (),
|
||||||
|
//~^ error: range pattern bounds cannot have parentheses
|
||||||
|
(0)..=(-4) => (),
|
||||||
|
//~^ error: range pattern bounds cannot have parentheses
|
||||||
|
//~| error: range pattern bounds cannot have parentheses
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! m {
|
||||||
|
($pat:pat) => {};
|
||||||
|
(($s:literal)..($e:literal)) => {};
|
||||||
|
}
|
||||||
|
|
||||||
|
m!((7)..(7));
|
50
tests/ui/parser/pat-recover-ranges.stderr
Normal file
|
@ -0,0 +1,50 @@
|
||||||
|
error: range pattern bounds cannot have parentheses
|
||||||
|
--> $DIR/pat-recover-ranges.rs:4:13
|
||||||
|
|
|
||||||
|
LL | 0..=(1) => (),
|
||||||
|
| ^ ^
|
||||||
|
|
|
||||||
|
help: remove these parentheses
|
||||||
|
|
|
||||||
|
LL - 0..=(1) => (),
|
||||||
|
LL + 0..=1 => (),
|
||||||
|
|
|
||||||
|
|
||||||
|
error: range pattern bounds cannot have parentheses
|
||||||
|
--> $DIR/pat-recover-ranges.rs:6:9
|
||||||
|
|
|
||||||
|
LL | (-12)..=4 => (),
|
||||||
|
| ^ ^
|
||||||
|
|
|
||||||
|
help: remove these parentheses
|
||||||
|
|
|
||||||
|
LL - (-12)..=4 => (),
|
||||||
|
LL + -12..=4 => (),
|
||||||
|
|
|
||||||
|
|
||||||
|
error: range pattern bounds cannot have parentheses
|
||||||
|
--> $DIR/pat-recover-ranges.rs:8:9
|
||||||
|
|
|
||||||
|
LL | (0)..=(-4) => (),
|
||||||
|
| ^ ^
|
||||||
|
|
|
||||||
|
help: remove these parentheses
|
||||||
|
|
|
||||||
|
LL - (0)..=(-4) => (),
|
||||||
|
LL + 0..=(-4) => (),
|
||||||
|
|
|
||||||
|
|
||||||
|
error: range pattern bounds cannot have parentheses
|
||||||
|
--> $DIR/pat-recover-ranges.rs:8:15
|
||||||
|
|
|
||||||
|
LL | (0)..=(-4) => (),
|
||||||
|
| ^ ^
|
||||||
|
|
|
||||||
|
help: remove these parentheses
|
||||||
|
|
|
||||||
|
LL - (0)..=(-4) => (),
|
||||||
|
LL + (0)..=-4 => (),
|
||||||
|
|
|
||||||
|
|
||||||
|
error: aborting due to 4 previous errors
|
||||||
|
|
|
@ -271,7 +271,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
|
||||||
span: $DIR/capture-macro-rules-invoke.rs:47:19: 47:20 (#0),
|
span: $DIR/capture-macro-rules-invoke.rs:47:19: 47:20 (#0),
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
span: $DIR/capture-macro-rules-invoke.rs:47:13: 47:22 (#0),
|
span: $DIR/capture-macro-rules-invoke.rs:15:60: 15:63 (#0),
|
||||||
},
|
},
|
||||||
Punct {
|
Punct {
|
||||||
ch: ',',
|
ch: ',',
|
||||||
|
|
|
@ -37,7 +37,7 @@ expand_expr_is!("hello", stringify!(hello));
|
||||||
expand_expr_is!("10 + 20", stringify!(10 + 20));
|
expand_expr_is!("10 + 20", stringify!(10 + 20));
|
||||||
|
|
||||||
macro_rules! echo_tts {
|
macro_rules! echo_tts {
|
||||||
($($t:tt)*) => { $($t)* }; //~ ERROR: expected expression, found `$`
|
($($t:tt)*) => { $($t)* };
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! echo_lit {
|
macro_rules! echo_lit {
|
||||||
|
@ -109,7 +109,7 @@ expand_expr_fail!("string"; hello); //~ ERROR: expected one of `.`, `?`, or an o
|
||||||
|
|
||||||
// Invalid expressions produce errors in addition to returning `Err(())`.
|
// Invalid expressions produce errors in addition to returning `Err(())`.
|
||||||
expand_expr_fail!($); //~ ERROR: expected expression, found `$`
|
expand_expr_fail!($); //~ ERROR: expected expression, found `$`
|
||||||
expand_expr_fail!(echo_tts!($));
|
expand_expr_fail!(echo_tts!($)); //~ ERROR: expected expression, found `$`
|
||||||
expand_expr_fail!(echo_pm!($)); //~ ERROR: expected expression, found `$`
|
expand_expr_fail!(echo_pm!($)); //~ ERROR: expected expression, found `$`
|
||||||
|
|
||||||
// We get errors reported and recover during macro expansion if the macro
|
// We get errors reported and recover during macro expansion if the macro
|
||||||
|
|
|
@ -11,10 +11,10 @@ LL | expand_expr_fail!($);
|
||||||
| ^ expected expression
|
| ^ expected expression
|
||||||
|
|
||||||
error: expected expression, found `$`
|
error: expected expression, found `$`
|
||||||
--> $DIR/expand-expr.rs:40:23
|
--> $DIR/expand-expr.rs:112:29
|
||||||
|
|
|
|
||||||
LL | ($($t:tt)*) => { $($t)* };
|
LL | expand_expr_fail!(echo_tts!($));
|
||||||
| ^^^^ expected expression
|
| ^ expected expression
|
||||||
|
|
||||||
error: expected expression, found `$`
|
error: expected expression, found `$`
|
||||||
--> $DIR/expand-expr.rs:113:28
|
--> $DIR/expand-expr.rs:113:28
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#![feature(const_trait_impl)]
|
#![feature(const_trait_impl, effects)]
|
||||||
// edition: 2021
|
// edition: 2021
|
||||||
|
|
||||||
#[const_trait]
|
#[const_trait]
|
||||||
|
@ -6,4 +6,12 @@ trait Trait {}
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let _: &dyn const Trait; //~ ERROR const trait bounds are not allowed in trait object types
|
let _: &dyn const Trait; //~ ERROR const trait bounds are not allowed in trait object types
|
||||||
|
let _: &dyn ~const Trait; //~ ERROR `~const` is not allowed here
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Regression test for issue #119525.
|
||||||
|
trait NonConst {}
|
||||||
|
const fn handle(_: &dyn const NonConst) {}
|
||||||
|
//~^ ERROR const trait bounds are not allowed in trait object types
|
||||||
|
const fn take(_: &dyn ~const NonConst) {}
|
||||||
|
//~^ ERROR `~const` is not allowed here
|
||||||
|
|
|
@ -4,5 +4,27 @@ error: const trait bounds are not allowed in trait object types
|
||||||
LL | let _: &dyn const Trait;
|
LL | let _: &dyn const Trait;
|
||||||
| ^^^^^^^^^^^
|
| ^^^^^^^^^^^
|
||||||
|
|
||||||
error: aborting due to 1 previous error
|
error: `~const` is not allowed here
|
||||||
|
--> $DIR/const-trait-bounds-trait-objects.rs:9:17
|
||||||
|
|
|
||||||
|
LL | let _: &dyn ~const Trait;
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
= note: trait objects cannot have `~const` trait bounds
|
||||||
|
|
||||||
|
error: const trait bounds are not allowed in trait object types
|
||||||
|
--> $DIR/const-trait-bounds-trait-objects.rs:14:25
|
||||||
|
|
|
||||||
|
LL | const fn handle(_: &dyn const NonConst) {}
|
||||||
|
| ^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
error: `~const` is not allowed here
|
||||||
|
--> $DIR/const-trait-bounds-trait-objects.rs:16:23
|
||||||
|
|
|
||||||
|
LL | const fn take(_: &dyn ~const NonConst) {}
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
= note: trait objects cannot have `~const` trait bounds
|
||||||
|
|
||||||
|
error: aborting due to 4 previous errors
|
||||||
|
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
// compile-flags: -Z sanitizer=address -O
|
// compile-flags: -Z sanitizer=address -O
|
||||||
//
|
//
|
||||||
// run-fail
|
// run-fail
|
||||||
// error-pattern: AddressSanitizer: SEGV
|
// regex-error-pattern: AddressSanitizer: (SEGV|attempting free on address which was not malloc)
|
||||||
|
|
||||||
use std::ffi::c_void;
|
use std::ffi::c_void;
|
||||||
|
|
||||||
|
|