Auto merge of #3270 - rust-lang:rustup-2024-01-21, r=RalfJung
Automatic Rustup
commit 70e720bc68
1103 changed files with 17093 additions and 8620 deletions
Cargo.lock
@@ -714,9 +714,9 @@ checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335"

[[package]]
name = "compiler_builtins"
version = "0.1.104"
version = "0.1.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99c3f9035afc33f4358773239573f7d121099856753e1bbd2a6a5207098fc741"
checksum = "3686cc48897ce1950aa70fd595bd2dc9f767a3c4cca4cd17b2cb52a2d37e6eb4"
dependencies = [
"cc",
"rustc-std-workspace-core",
@@ -2187,16 +2187,6 @@ dependencies = [
"cc",
]

[[package]]
name = "libloading"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
dependencies = [
"cfg-if",
"winapi",
]

[[package]]
name = "libloading"
version = "0.8.1"
@@ -2366,9 +2356,9 @@ dependencies = [

[[package]]
name = "measureme"
version = "10.1.2"
version = "11.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45e381dcdad44c3c435f8052b08c5c4a1449c48ab56f312345eae12d7a693dbe"
checksum = "dfa4a40f09af7aa6faef38285402a78847d0d72bf8827006cd2a332e1e6e4a8d"
dependencies = [
"log",
"memmap2",
@@ -2479,7 +2469,7 @@ dependencies = [
"lazy_static",
"libc",
"libffi",
"libloading 0.8.1",
"libloading",
"log",
"measureme",
"rand",
@@ -3879,6 +3869,7 @@ dependencies = [
name = "rustc_hir_analysis"
version = "0.0.0"
dependencies = [
"itertools",
"rustc_arena",
"rustc_ast",
"rustc_attr",
@@ -3917,6 +3908,7 @@ dependencies = [
name = "rustc_hir_typeck"
version = "0.0.0"
dependencies = [
"itertools",
"rustc_ast",
"rustc_attr",
"rustc_data_structures",
@@ -4003,7 +3995,7 @@ dependencies = [
name = "rustc_interface"
version = "0.0.0"
dependencies = [
"libloading 0.7.4",
"libloading",
"rustc-rayon",
"rustc-rayon-core",
"rustc_ast",
@@ -4133,7 +4125,7 @@ name = "rustc_metadata"
version = "0.0.0"
dependencies = [
"bitflags 2.4.1",
"libloading 0.7.4",
"libloading",
"odht",
"rustc_ast",
"rustc_attr",
@@ -4200,6 +4192,7 @@ name = "rustc_mir_build"
version = "0.0.0"
dependencies = [
"either",
"itertools",
"rustc_apfloat",
"rustc_arena",
"rustc_ast",
@@ -4352,6 +4345,7 @@ name = "rustc_pattern_analysis"
version = "0.0.0"
dependencies = [
"derivative",
"rustc-hash",
"rustc_apfloat",
"rustc_arena",
"rustc_data_structures",
@@ -4366,7 +4360,6 @@ dependencies = [
"rustc_target",
"smallvec",
"tracing",
"typed-arena",
]

[[package]]
@@ -5351,9 +5344,9 @@ dependencies = [

[[package]]
name = "thin-vec"
version = "0.2.12"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aac81b6fd6beb5884b0cf3321b8117e6e5d47ecb6fc89f414cfdcca8b2fe2dd8"
checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b"

[[package]]
name = "thiserror"
@@ -5705,12 +5698,6 @@ dependencies = [
"rustc-hash",
]

[[package]]
name = "typed-arena"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"

[[package]]
name = "typenum"
version = "1.16.0"
@@ -49,7 +49,14 @@ bitflags! {
| ReprFlags::IS_LINEAR.bits();
}
}
rustc_data_structures::external_bitflags_debug! { ReprFlags }

// This is the same as `rustc_data_structures::external_bitflags_debug` but without the
// `rustc_data_structures` to make it build on stable.
impl std::fmt::Debug for ReprFlags {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
bitflags::parser::to_writer(self, f)
}
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "nightly", derive(Encodable_Generic, Decodable_Generic, HashStable_Generic))]
@@ -625,7 +625,8 @@ impl Pat {
| PatKind::Range(..)
| PatKind::Ident(..)
| PatKind::Path(..)
| PatKind::MacCall(_) => {}
| PatKind::MacCall(_)
| PatKind::Err(_) => {}
}
}

@@ -809,6 +810,9 @@ pub enum PatKind {

/// A macro pattern; pre-expansion.
MacCall(P<MacCall>),

/// Placeholder for a pattern that wasn't syntactically well formed in some way.
Err(ErrorGuaranteed),
}

/// Whether the `..` is present in a struct fields pattern.
@@ -3300,9 +3304,13 @@ mod size_asserts {
static_assert_size!(Impl, 136);
static_assert_size!(Item, 136);
static_assert_size!(ItemKind, 64);
static_assert_size!(LitKind, 24);
// This can be removed after i128:128 is in the bootstrap compiler's target.
#[cfg(not(bootstrap))]
static_assert_size!(LitKind, 32);
static_assert_size!(Local, 72);
static_assert_size!(MetaItemLit, 40);
// This can be removed after i128:128 is in the bootstrap compiler's target.
#[cfg(not(bootstrap))]
static_assert_size!(MetaItemLit, 48);
static_assert_size!(Param, 40);
static_assert_size!(Pat, 72);
static_assert_size!(Path, 24);
@@ -1267,7 +1267,7 @@ pub fn noop_visit_pat<T: MutVisitor>(pat: &mut P<Pat>, vis: &mut T) {
let Pat { id, kind, span, tokens } = pat.deref_mut();
vis.visit_id(id);
match kind {
PatKind::Wild | PatKind::Rest | PatKind::Never => {}
PatKind::Wild | PatKind::Rest | PatKind::Never | PatKind::Err(_) => {}
PatKind::Ident(_binding_mode, ident, sub) => {
vis.visit_ident(ident);
visit_opt(sub, |sub| vis.visit_pat(sub));
@@ -2,7 +2,7 @@

// Predicates on exprs and stmts that the pretty-printer and parser use

use crate::ast;
use crate::{ast, token::Delimiter};

/// Does this expression require a semicolon to be treated
/// as a statement? The negation of this: 'can this expression
@@ -59,8 +59,12 @@ pub fn expr_trailing_brace(mut expr: &ast::Expr) -> Option<&ast::Expr> {
| While(..)
| ConstBlock(_) => break Some(expr),

// FIXME: These can end in `}`, but changing these would break stable code.
InlineAsm(_) | OffsetOf(_, _) | MacCall(_) | IncludedBytes(_) | FormatArgs(_) => {
MacCall(mac) => {
break (mac.args.delim == Delimiter::Brace).then_some(expr);
}

InlineAsm(_) | OffsetOf(_, _) | IncludedBytes(_) | FormatArgs(_) => {
// These should have been denied pre-expansion.
break None;
}

@@ -8,7 +8,6 @@ use rustc_lexer::unescape::{
};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
use std::ops::Range;
use std::{ascii, fmt, str};

// Escapes a string, represented as a symbol. Reuses the original symbol,
@@ -39,7 +38,6 @@ pub enum LitError {
InvalidFloatSuffix,
NonDecimalFloat(u32),
IntTooLarge(u32),
NulInCStr(Range<usize>),
}

impl LitKind {
@@ -156,10 +154,7 @@ impl LitKind {
let s = symbol.as_str();
let mut buf = Vec::with_capacity(s.len());
let mut error = Ok(());
unescape_c_string(s, Mode::CStr, &mut |span, c| match c {
Ok(CStrUnit::Byte(0) | CStrUnit::Char('\0')) => {
error = Err(LitError::NulInCStr(span));
}
unescape_c_string(s, Mode::CStr, &mut |_span, c| match c {
Ok(CStrUnit::Byte(b)) => buf.push(b),
Ok(CStrUnit::Char(c)) => {
buf.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes())
@@ -179,10 +174,7 @@ impl LitKind {
// can convert the symbol directly to a `Lrc<u8>` on success.
let s = symbol.as_str();
let mut error = Ok(());
unescape_c_string(s, Mode::RawCStr, &mut |span, c| match c {
Ok(CStrUnit::Byte(0) | CStrUnit::Char('\0')) => {
error = Err(LitError::NulInCStr(span));
}
unescape_c_string(s, Mode::RawCStr, &mut |_, c| match c {
Ok(_) => {}
Err(err) => {
if err.is_fatal() {
@@ -568,7 +568,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) {
walk_list!(visitor, visit_expr, lower_bound);
walk_list!(visitor, visit_expr, upper_bound);
}
PatKind::Wild | PatKind::Rest | PatKind::Never => {}
PatKind::Wild | PatKind::Rest | PatKind::Never | PatKind::Err(_) => {}
PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => {
walk_list!(visitor, visit_pat, elems);
}
@@ -14,10 +14,6 @@ ast_lowering_assoc_ty_parentheses =
ast_lowering_async_coroutines_not_supported =
`async` coroutines are not yet supported

ast_lowering_async_non_move_closure_not_supported =
`async` non-`move` closures with parameters are not currently supported
.help = consider using `let` statements to manually capture variables by reference before entering an `async move` closure

ast_lowering_att_syntax_only_x86 =
the `att_syntax` option is only supported on x86

@@ -145,14 +145,6 @@ pub struct ClosureCannotBeStatic {
pub fn_decl_span: Span,
}

#[derive(Diagnostic, Clone, Copy)]
#[help]
#[diag(ast_lowering_async_non_move_closure_not_supported, code = "E0708")]
pub struct AsyncNonMoveClosureNotSupported {
#[primary_span]
pub fn_decl_span: Span,
}

#[derive(Diagnostic, Clone, Copy)]
#[diag(ast_lowering_functional_record_update_destructuring_assignment)]
pub struct FunctionalRecordUpdateDestructuringAssignment {
@@ -1,6 +1,6 @@
use super::errors::{
AsyncCoroutinesNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
BaseExpressionDoubleDot, ClosureCannotBeStatic, CoroutineTooManyParameters,
AsyncCoroutinesNotSupported, AwaitOnlyInAsyncFnAndBlocks, BaseExpressionDoubleDot,
ClosureCannotBeStatic, CoroutineTooManyParameters,
FunctionalRecordUpdateDestructuringAssignment, InclusiveRangeWithNoEnd, MatchArmWithNoBody,
NeverPatternWithBody, NeverPatternWithGuard, NotSupportedForLifetimeBinderAsyncClosure,
UnderscoreExprLhsAssign,
@@ -13,7 +13,6 @@ use rustc_ast::*;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_middle::span_bug;
use rustc_session::errors::report_lit_error;
use rustc_span::source_map::{respan, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -1028,28 +1027,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
fn_decl_span: Span,
fn_arg_span: Span,
) -> hir::ExprKind<'hir> {
let CoroutineKind::Async { closure_id: inner_closure_id, .. } = coroutine_kind else {
span_bug!(fn_decl_span, "`async gen` and `gen` closures are not supported, yet");
};

if let &ClosureBinder::For { span, .. } = binder {
self.dcx().emit_err(NotSupportedForLifetimeBinderAsyncClosure { span });
}

let (binder_clause, generic_params) = self.lower_closure_binder(binder);

let outer_decl =
FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };

let body = self.with_new_scopes(fn_decl_span, |this| {
// FIXME(cramertj): allow `async` non-`move` closures with arguments.
if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
this.dcx().emit_err(AsyncNonMoveClosureNotSupported { fn_decl_span });
}

// Transform `async |x: u8| -> X { ... }` into
// `|x: u8| || -> X { ... }`.
let body_id = this.lower_fn_body(&outer_decl, |this| {
let body_id = this.lower_body(|this| {
let async_ret_ty = if let FnRetTy::Ty(ty) = &decl.output {
let itctx = ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock);
Some(hir::FnRetTy::Return(this.lower_ty(ty, &itctx)))
@@ -1057,22 +1044,26 @@ impl<'hir> LoweringContext<'_, 'hir> {
None
};

let async_body = this.make_desugared_coroutine_expr(
capture_clause,
inner_closure_id,
async_ret_ty,
body.span,
hir::CoroutineDesugaring::Async,
hir::CoroutineSource::Closure,
let (parameters, expr) = this.lower_coroutine_body_with_moved_arguments(
decl,
|this| this.with_new_scopes(fn_decl_span, |this| this.lower_expr_mut(body)),
body.span,
coroutine_kind,
hir::CoroutineSource::Closure,
async_ret_ty,
);
let hir_id = this.lower_node_id(inner_closure_id);

let hir_id = this.lower_node_id(coroutine_kind.closure_id());
this.maybe_forward_track_caller(body.span, closure_hir_id, hir_id);
hir::Expr { hir_id, kind: async_body, span: this.lower_span(body.span) }

(parameters, expr)
});
body_id
});

let outer_decl =
FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };

let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
// We need to lower the declaration outside the new scope, because we
// have to conserve the state of being inside a loop condition for the
@@ -1082,196 +1082,226 @@ impl<'hir> LoweringContext<'_, 'hir> {
let (Some(coroutine_kind), Some(body)) = (coroutine_kind, body) else {
return self.lower_fn_body_block(span, decl, body);
};
let closure_id = coroutine_kind.closure_id();

self.lower_body(|this| {
let mut parameters: Vec<hir::Param<'_>> = Vec::new();
let mut statements: Vec<hir::Stmt<'_>> = Vec::new();

// Async function parameters are lowered into the closure body so that they are
// captured and so that the drop order matches the equivalent non-async functions.
//
// from:
//
// async fn foo(<pattern>: <ty>, <pattern>: <ty>, <pattern>: <ty>) {
// <body>
// }
//
// into:
//
// fn foo(__arg0: <ty>, __arg1: <ty>, __arg2: <ty>) {
// async move {
// let __arg2 = __arg2;
// let <pattern> = __arg2;
// let __arg1 = __arg1;
// let <pattern> = __arg1;
// let __arg0 = __arg0;
// let <pattern> = __arg0;
// drop-temps { <body> } // see comments later in fn for details
// }
// }
//
// If `<pattern>` is a simple ident, then it is lowered to a single
// `let <pattern> = <pattern>;` statement as an optimization.
//
// Note that the body is embedded in `drop-temps`; an
// equivalent desugaring would be `return { <body>
// };`. The key point is that we wish to drop all the
// let-bound variables and temporaries created in the body
// (and its tail expression!) before we drop the
// parameters (c.f. rust-lang/rust#64512).
for (index, parameter) in decl.inputs.iter().enumerate() {
let parameter = this.lower_param(parameter);
let span = parameter.pat.span;

// Check if this is a binding pattern, if so, we can optimize and avoid adding a
// `let <pat> = __argN;` statement. In this case, we do not rename the parameter.
let (ident, is_simple_parameter) = match parameter.pat.kind {
hir::PatKind::Binding(hir::BindingAnnotation(ByRef::No, _), _, ident, _) => {
(ident, true)
}
// For `ref mut` or wildcard arguments, we can't reuse the binding, but
// we can keep the same name for the parameter.
// This lets rustdoc render it correctly in documentation.
hir::PatKind::Binding(_, _, ident, _) => (ident, false),
hir::PatKind::Wild => {
(Ident::with_dummy_span(rustc_span::symbol::kw::Underscore), false)
}
_ => {
// Replace the ident for bindings that aren't simple.
let name = format!("__arg{index}");
let ident = Ident::from_str(&name);

(ident, false)
}
};

let desugared_span = this.mark_span_with_reason(DesugaringKind::Async, span, None);

// Construct a parameter representing `__argN: <ty>` to replace the parameter of the
// async function.
//
// If this is the simple case, this parameter will end up being the same as the
// original parameter, but with a different pattern id.
let stmt_attrs = this.attrs.get(&parameter.hir_id.local_id).copied();
let (new_parameter_pat, new_parameter_id) = this.pat_ident(desugared_span, ident);
let new_parameter = hir::Param {
hir_id: parameter.hir_id,
pat: new_parameter_pat,
ty_span: this.lower_span(parameter.ty_span),
span: this.lower_span(parameter.span),
};

if is_simple_parameter {
// If this is the simple case, then we only insert one statement that is
// `let <pat> = <pat>;`. We re-use the original argument's pattern so that
// `HirId`s are densely assigned.
let expr = this.expr_ident(desugared_span, ident, new_parameter_id);
let stmt = this.stmt_let_pat(
stmt_attrs,
desugared_span,
Some(expr),
parameter.pat,
hir::LocalSource::AsyncFn,
);
statements.push(stmt);
} else {
// If this is not the simple case, then we construct two statements:
//
// ```
// let __argN = __argN;
// let <pat> = __argN;
// ```
//
// The first statement moves the parameter into the closure and thus ensures
// that the drop order is correct.
//
// The second statement creates the bindings that the user wrote.

// Construct the `let mut __argN = __argN;` statement. It must be a mut binding
// because the user may have specified a `ref mut` binding in the next
// statement.
let (move_pat, move_id) = this.pat_ident_binding_mode(
desugared_span,
ident,
hir::BindingAnnotation::MUT,
);
let move_expr = this.expr_ident(desugared_span, ident, new_parameter_id);
let move_stmt = this.stmt_let_pat(
None,
desugared_span,
Some(move_expr),
move_pat,
hir::LocalSource::AsyncFn,
);

// Construct the `let <pat> = __argN;` statement. We re-use the original
// parameter's pattern so that `HirId`s are densely assigned.
let pattern_expr = this.expr_ident(desugared_span, ident, move_id);
let pattern_stmt = this.stmt_let_pat(
stmt_attrs,
desugared_span,
Some(pattern_expr),
parameter.pat,
hir::LocalSource::AsyncFn,
);

statements.push(move_stmt);
statements.push(pattern_stmt);
};

parameters.push(new_parameter);
}

let mkbody = |this: &mut LoweringContext<'_, 'hir>| {
// Create a block from the user's function body:
let user_body = this.lower_block_expr(body);

// Transform into `drop-temps { <user-body> }`, an expression:
let desugared_span =
this.mark_span_with_reason(DesugaringKind::Async, user_body.span, None);
let user_body = this.expr_drop_temps(desugared_span, this.arena.alloc(user_body));

// As noted above, create the final block like
//
// ```
// {
// let $param_pattern = $raw_param;
// ...
// drop-temps { <user-body> }
// }
// ```
let body = this.block_all(
desugared_span,
this.arena.alloc_from_iter(statements),
Some(user_body),
);

this.expr_block(body)
};
let desugaring_kind = match coroutine_kind {
CoroutineKind::Async { .. } => hir::CoroutineDesugaring::Async,
CoroutineKind::Gen { .. } => hir::CoroutineDesugaring::Gen,
CoroutineKind::AsyncGen { .. } => hir::CoroutineDesugaring::AsyncGen,
};
let coroutine_expr = this.make_desugared_coroutine_expr(
CaptureBy::Value { move_kw: rustc_span::DUMMY_SP },
closure_id,
None,
let (parameters, expr) = this.lower_coroutine_body_with_moved_arguments(
decl,
|this| this.lower_block_expr(body),
body.span,
desugaring_kind,
coroutine_kind,
hir::CoroutineSource::Fn,
mkbody,
None,
);

let hir_id = this.lower_node_id(closure_id);
// FIXME(async_fn_track_caller): Can this be moved above?
let hir_id = this.lower_node_id(coroutine_kind.closure_id());
this.maybe_forward_track_caller(body.span, fn_id, hir_id);
let expr = hir::Expr { hir_id, kind: coroutine_expr, span: this.lower_span(body.span) };

(this.arena.alloc_from_iter(parameters), expr)
(parameters, expr)
})
}

/// Lowers a desugared coroutine body after moving all of the arguments
/// into the body. This is to make sure that the future actually owns the
/// arguments that are passed to the function, and to ensure things like
/// drop order are stable.
pub fn lower_coroutine_body_with_moved_arguments(
&mut self,
decl: &FnDecl,
lower_body: impl FnOnce(&mut LoweringContext<'_, 'hir>) -> hir::Expr<'hir>,
body_span: Span,
coroutine_kind: CoroutineKind,
coroutine_source: hir::CoroutineSource,
return_type_hint: Option<hir::FnRetTy<'hir>>,
) -> (&'hir [hir::Param<'hir>], hir::Expr<'hir>) {
let mut parameters: Vec<hir::Param<'_>> = Vec::new();
let mut statements: Vec<hir::Stmt<'_>> = Vec::new();

// Async function parameters are lowered into the closure body so that they are
// captured and so that the drop order matches the equivalent non-async functions.
//
// from:
//
// async fn foo(<pattern>: <ty>, <pattern>: <ty>, <pattern>: <ty>) {
// <body>
// }
//
// into:
//
// fn foo(__arg0: <ty>, __arg1: <ty>, __arg2: <ty>) {
// async move {
// let __arg2 = __arg2;
// let <pattern> = __arg2;
// let __arg1 = __arg1;
// let <pattern> = __arg1;
// let __arg0 = __arg0;
// let <pattern> = __arg0;
// drop-temps { <body> } // see comments later in fn for details
// }
// }
//
// If `<pattern>` is a simple ident, then it is lowered to a single
// `let <pattern> = <pattern>;` statement as an optimization.
//
// Note that the body is embedded in `drop-temps`; an
// equivalent desugaring would be `return { <body>
// };`. The key point is that we wish to drop all the
// let-bound variables and temporaries created in the body
// (and its tail expression!) before we drop the
// parameters (c.f. rust-lang/rust#64512).
for (index, parameter) in decl.inputs.iter().enumerate() {
let parameter = self.lower_param(parameter);
let span = parameter.pat.span;

// Check if this is a binding pattern, if so, we can optimize and avoid adding a
// `let <pat> = __argN;` statement. In this case, we do not rename the parameter.
let (ident, is_simple_parameter) = match parameter.pat.kind {
hir::PatKind::Binding(hir::BindingAnnotation(ByRef::No, _), _, ident, _) => {
(ident, true)
}
// For `ref mut` or wildcard arguments, we can't reuse the binding, but
// we can keep the same name for the parameter.
// This lets rustdoc render it correctly in documentation.
hir::PatKind::Binding(_, _, ident, _) => (ident, false),
hir::PatKind::Wild => {
(Ident::with_dummy_span(rustc_span::symbol::kw::Underscore), false)
}
_ => {
// Replace the ident for bindings that aren't simple.
let name = format!("__arg{index}");
let ident = Ident::from_str(&name);

(ident, false)
}
};

let desugared_span = self.mark_span_with_reason(DesugaringKind::Async, span, None);

// Construct a parameter representing `__argN: <ty>` to replace the parameter of the
// async function.
//
// If this is the simple case, this parameter will end up being the same as the
// original parameter, but with a different pattern id.
let stmt_attrs = self.attrs.get(&parameter.hir_id.local_id).copied();
let (new_parameter_pat, new_parameter_id) = self.pat_ident(desugared_span, ident);
let new_parameter = hir::Param {
hir_id: parameter.hir_id,
pat: new_parameter_pat,
ty_span: self.lower_span(parameter.ty_span),
span: self.lower_span(parameter.span),
};

if is_simple_parameter {
// If this is the simple case, then we only insert one statement that is
// `let <pat> = <pat>;`. We re-use the original argument's pattern so that
// `HirId`s are densely assigned.
let expr = self.expr_ident(desugared_span, ident, new_parameter_id);
let stmt = self.stmt_let_pat(
stmt_attrs,
desugared_span,
Some(expr),
parameter.pat,
hir::LocalSource::AsyncFn,
);
statements.push(stmt);
} else {
// If this is not the simple case, then we construct two statements:
//
// ```
// let __argN = __argN;
// let <pat> = __argN;
// ```
//
// The first statement moves the parameter into the closure and thus ensures
// that the drop order is correct.
//
// The second statement creates the bindings that the user wrote.

// Construct the `let mut __argN = __argN;` statement. It must be a mut binding
// because the user may have specified a `ref mut` binding in the next
// statement.
let (move_pat, move_id) =
self.pat_ident_binding_mode(desugared_span, ident, hir::BindingAnnotation::MUT);
let move_expr = self.expr_ident(desugared_span, ident, new_parameter_id);
let move_stmt = self.stmt_let_pat(
None,
desugared_span,
Some(move_expr),
move_pat,
hir::LocalSource::AsyncFn,
);

// Construct the `let <pat> = __argN;` statement. We re-use the original
// parameter's pattern so that `HirId`s are densely assigned.
let pattern_expr = self.expr_ident(desugared_span, ident, move_id);
let pattern_stmt = self.stmt_let_pat(
stmt_attrs,
desugared_span,
Some(pattern_expr),
parameter.pat,
hir::LocalSource::AsyncFn,
);

statements.push(move_stmt);
statements.push(pattern_stmt);
};

parameters.push(new_parameter);
}

let mkbody = |this: &mut LoweringContext<'_, 'hir>| {
// Create a block from the user's function body:
let user_body = lower_body(this);

// Transform into `drop-temps { <user-body> }`, an expression:
let desugared_span =
this.mark_span_with_reason(DesugaringKind::Async, user_body.span, None);
let user_body = this.expr_drop_temps(desugared_span, this.arena.alloc(user_body));

// As noted above, create the final block like
//
// ```
// {
// let $param_pattern = $raw_param;
// ...
// drop-temps { <user-body> }
// }
// ```
let body = this.block_all(
desugared_span,
this.arena.alloc_from_iter(statements),
Some(user_body),
);

this.expr_block(body)
};
let desugaring_kind = match coroutine_kind {
CoroutineKind::Async { .. } => hir::CoroutineDesugaring::Async,
CoroutineKind::Gen { .. } => hir::CoroutineDesugaring::Gen,
CoroutineKind::AsyncGen { .. } => hir::CoroutineDesugaring::AsyncGen,
};
let closure_id = coroutine_kind.closure_id();
let coroutine_expr = self.make_desugared_coroutine_expr(
// FIXME(async_closures): This should only move locals,
// and not upvars. Capturing closure upvars by ref doesn't
// work right now anyways, so whatever.
CaptureBy::Value { move_kw: rustc_span::DUMMY_SP },
closure_id,
return_type_hint,
body_span,
desugaring_kind,
coroutine_source,
mkbody,
);

let expr = hir::Expr {
hir_id: self.lower_node_id(closure_id),
kind: coroutine_expr,
span: self.lower_span(body_span),
};

(self.arena.alloc_from_iter(parameters), expr)
}

fn lower_method_sig(
&mut self,
generics: &Generics,
@@ -662,9 +662,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let (opt_hash_including_bodies, attrs_hash) = if self.tcx.needs_crate_hash() {
self.tcx.with_stable_hashing_context(|mut hcx| {
let mut stable_hasher = StableHasher::new();
hcx.with_hir_bodies(node.def_id(), &bodies, |hcx| {
node.hash_stable(hcx, &mut stable_hasher)
});
node.hash_stable(&mut hcx, &mut stable_hasher);
// Bodies are stored out of line, so we need to pull them explicitly in the hash.
bodies.hash_stable(&mut hcx, &mut stable_hasher);
let h1 = stable_hasher.finish();

let mut stable_hasher = StableHasher::new();
@@ -109,6 +109,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// return inner to be processed in next loop
PatKind::Paren(inner) => pattern = inner,
PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", pattern.span),
PatKind::Err(guar) => break hir::PatKind::Err(*guar),
}
};

@@ -1519,6 +1519,11 @@ impl<'a> State<'a> {
self.pclose();
}
PatKind::MacCall(m) => self.print_mac(m),
PatKind::Err(_) => {
self.popen();
self.word("/*ERROR*/");
self.pclose();
}
}
self.ann.post(self, AnnNode::Pat(pat))
}
@@ -26,6 +26,7 @@ use rustc_span::hygiene::DesugaringKind;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{BytePos, Span, Symbol};
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::error_reporting::FindExprBySpan;
use rustc_trait_selection::traits::ObligationCtxt;
use std::iter;

@@ -1257,7 +1258,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
return None;
};
debug!("checking call args for uses of inner_param: {:?}", args);
args.contains(&Operand::Move(inner_param)).then_some((loc, term))
args.iter()
.map(|a| &a.node)
.any(|a| a == &Operand::Move(inner_param))
.then_some((loc, term))
})
else {
debug!("no uses of inner_param found as a by-move call arg");
@@ -1301,14 +1305,96 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
place: Place<'tcx>,
borrowed_place: Place<'tcx>,
) {
if let ([ProjectionElem::Index(_)], [ProjectionElem::Index(_)]) =
(&place.projection[..], &borrowed_place.projection[..])
let tcx = self.infcx.tcx;
let hir = tcx.hir();

if let ([ProjectionElem::Index(index1)], [ProjectionElem::Index(index2)])
| (
[ProjectionElem::Deref, ProjectionElem::Index(index1)],
[ProjectionElem::Deref, ProjectionElem::Index(index2)],
) = (&place.projection[..], &borrowed_place.projection[..])
{
err.help(
"consider using `.split_at_mut(position)` or similar method to obtain \
two mutable non-overlapping sub-slices",
)
.help("consider using `.swap(index_1, index_2)` to swap elements at the specified indices");
let mut note_default_suggestion = || {
err.help(
"consider using `.split_at_mut(position)` or similar method to obtain \
two mutable non-overlapping sub-slices",
)
.help("consider using `.swap(index_1, index_2)` to swap elements at the specified indices");
};

let Some(body_id) = tcx.hir_node(self.mir_hir_id()).body_id() else {
note_default_suggestion();
return;
};

let mut expr_finder =
FindExprBySpan::new(self.body.local_decls[*index1].source_info.span);
expr_finder.visit_expr(hir.body(body_id).value);
let Some(index1) = expr_finder.result else {
note_default_suggestion();
return;
};

expr_finder = FindExprBySpan::new(self.body.local_decls[*index2].source_info.span);
expr_finder.visit_expr(hir.body(body_id).value);
let Some(index2) = expr_finder.result else {
note_default_suggestion();
return;
};

let sm = tcx.sess.source_map();

let Ok(index1_str) = sm.span_to_snippet(index1.span) else {
note_default_suggestion();
return;
};

let Ok(index2_str) = sm.span_to_snippet(index2.span) else {
note_default_suggestion();
return;
};

let Some(object) = hir.parent_id_iter(index1.hir_id).find_map(|id| {
if let hir::Node::Expr(expr) = tcx.hir_node(id)
&& let hir::ExprKind::Index(obj, ..) = expr.kind
{
Some(obj)
} else {
None
}
}) else {
note_default_suggestion();
return;
};

let Ok(obj_str) = sm.span_to_snippet(object.span) else {
note_default_suggestion();
return;
};

let Some(swap_call) = hir.parent_id_iter(object.hir_id).find_map(|id| {
if let hir::Node::Expr(call) = tcx.hir_node(id)
&& let hir::ExprKind::Call(callee, ..) = call.kind
&& let hir::ExprKind::Path(qpath) = callee.kind
&& let hir::QPath::Resolved(None, res) = qpath
&& let hir::def::Res::Def(_, did) = res.res
&& tcx.is_diagnostic_item(sym::mem_swap, did)
{
Some(call)
} else {
None
}
}) else {
note_default_suggestion();
return;
};

err.span_suggestion(
swap_call.span,
"use `.swap()` to swap elements at the specified indices instead",
format!("{obj_str}.swap({index1_str}, {index2_str})"),
Applicability::MachineApplicable,
);
}
}

@@ -3242,7 +3328,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
assigned_to, args
);
for operand in args {
let (Operand::Copy(assigned_from) | Operand::Move(assigned_from)) = operand
let (Operand::Copy(assigned_from) | Operand::Move(assigned_from)) =
&operand.node
else {
continue;
};
@@ -691,7 +691,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
// Check if one of the arguments to this function is the target place.
let found_target = args.iter().any(|arg| {
if let Operand::Move(place) = arg {
if let Operand::Move(place) = arg.node {
if let Some(potential) = place.as_local() {
potential == target
} else {
@@ -23,6 +23,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_middle::util::{call_kind, CallDesugaringKind};
use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult};
use rustc_span::def_id::LocalDefId;
use rustc_span::source_map::Spanned;
use rustc_span::{symbol::sym, Span, Symbol, DUMMY_SP};
use rustc_target::abi::{FieldIdx, VariantIdx};
use rustc_trait_selection::infer::InferCtxtExt;
@@ -111,9 +112,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
debug!("add_moved_or_invoked_closure_note: id={:?}", id);
if Some(self.infcx.tcx.parent(id)) == self.infcx.tcx.lang_items().fn_once_trait() {
let closure = match args.first() {
Some(Operand::Copy(place) | Operand::Move(place))
if target == place.local_or_deref_local() =>
{
Some(Spanned {
node: Operand::Copy(place) | Operand::Move(place), ..
}) if target == place.local_or_deref_local() => {
place.local_or_deref_local().unwrap()
}
_ => return false,
@@ -1178,9 +1179,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} else {
vec![(move_span.shrink_to_hi(), ".clone()".to_string())]
};
if let Some(errors) =
self.infcx.could_impl_trait(clone_trait, ty, self.param_env)
&& !has_sugg
if let Some(errors) = self.infcx.type_implements_trait_shallow(
clone_trait,
ty,
self.param_env,
) && !has_sugg
{
let msg = match &errors[..] {
[] => "you can `clone` the value and consume it, but this \
@@ -1213,7 +1216,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
Applicability::MaybeIncorrect,
);
for error in errors {
if let FulfillmentErrorCode::CodeSelectionError(
if let FulfillmentErrorCode::SelectionError(
SelectionError::Unimplemented,
) = error.code
&& let ty::PredicateKind::Clause(ty::ClauseKind::Trait(
@@ -1217,19 +1217,22 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
{
match self
.infcx
.could_impl_trait(clone_trait, ty.peel_refs(), self.param_env)
.type_implements_trait_shallow(
clone_trait,
ty.peel_refs(),
self.param_env,
)
.as_deref()
{
Some([]) => {
// The type implements Clone.
err.span_help(
expr.span,
format!(
"you can `clone` the `{}` value and consume it, but this \
might not be your desired behavior",
ty.peel_refs(),
),
);
// FIXME: This error message isn't useful, since we're just
// vaguely suggesting to clone a value that already
// implements `Clone`.
//
// A correct suggestion here would take into account the fact
// that inference may be affected by missing types on bindings,
// etc., to improve "tests/ui/borrowck/issue-91206.stderr", for
// example.
}
None => {
if let hir::ExprKind::MethodCall(segment, _rcvr, [], span) =
@@ -1288,7 +1291,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
}
// The type doesn't implement Clone because of unmet obligations.
for error in errors {
if let traits::FulfillmentErrorCode::CodeSelectionError(
if let traits::FulfillmentErrorCode::SelectionError(
traits::SelectionError::Unimplemented,
) = error.code
&& let ty::PredicateKind::Clause(ty::ClauseKind::Trait(
@@ -703,7 +703,7 @@ impl<'cx, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx, R> for MirBorro
} => {
self.consume_operand(loc, (func, span), flow_state);
for arg in args {
self.consume_operand(loc, (arg, span), flow_state);
self.consume_operand(loc, (&arg.node, arg.span), flow_state);
}
self.mutate_place(loc, (*destination, span), Deep, flow_state);
}
@@ -12,6 +12,7 @@ use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, OpaqueHiddenType, TyCtxt};
use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
use rustc_mir_dataflow::move_paths::MoveData;
use rustc_mir_dataflow::points::DenseLocationMap;
use rustc_mir_dataflow::ResultsCursor;
use rustc_span::symbol::sym;
use std::env;
@@ -27,7 +28,7 @@ use crate::{
facts::{AllFacts, AllFactsExt, RustcFacts},
location::LocationTable,
polonius,
region_infer::{values::RegionValueElements, RegionInferenceContext},
region_infer::RegionInferenceContext,
renumber,
type_check::{self, MirTypeckRegionConstraints, MirTypeckResults},
universal_regions::UniversalRegions,
@@ -98,7 +99,7 @@ pub(crate) fn compute_regions<'cx, 'tcx>(

let universal_regions = Rc::new(universal_regions);

let elements = &Rc::new(RegionValueElements::new(body));
let elements = &Rc::new(DenseLocationMap::new(body));

// Run the MIR type-checker.
let MirTypeckResults { constraints, universal_region_relations, opaque_type_values } =
@@ -120,7 +120,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'cx, 'tcx> {
} => {
self.consume_operand(location, func);
for arg in args {
self.consume_operand(location, arg);
self.consume_operand(location, &arg.node);
}
self.mutate_place(location, *destination, Deep);
}
@@ -18,7 +18,8 @@ use rustc_middle::mir::{
};
use rustc_middle::traits::ObligationCause;
use rustc_middle::traits::ObligationCauseCode;
use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
use rustc_mir_dataflow::points::DenseLocationMap;
use rustc_span::Span;

use crate::constraints::graph::{self, NormalConstraintGraph, RegionGraph};
@@ -30,8 +31,7 @@ use crate::{
nll::PoloniusOutput,
region_infer::reverse_sccs::ReverseSccGraph,
region_infer::values::{
LivenessValues, PlaceholderIndices, RegionElement, RegionValueElements, RegionValues,
ToElementIndex,
LivenessValues, PlaceholderIndices, RegionElement, RegionValues, ToElementIndex,
},
type_check::{free_region_relations::UniversalRegionRelations, Locations},
universal_regions::UniversalRegions,
@@ -330,7 +330,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
universe_causes: FxIndexMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
type_tests: Vec<TypeTest<'tcx>>,
liveness_constraints: LivenessValues,
elements: &Rc<RegionValueElements>,
elements: &Rc<DenseLocationMap>,
) -> Self {
debug!("universal_regions: {:#?}", universal_regions);
debug!("outlives constraints: {:#?}", outlives_constraints);
@@ -1145,6 +1145,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
}

let ty = ty.fold_with(&mut OpaqueFolder { tcx });
let mut failed = false;

let ty = tcx.fold_regions(ty, |r, _depth| {
let r_vid = self.to_region_vid(r);
@@ -1160,15 +1161,18 @@ impl<'tcx> RegionInferenceContext<'tcx> {
.filter(|&u_r| !self.universal_regions.is_local_free_region(u_r))
.find(|&u_r| self.eval_equal(u_r, r_vid))
.map(|u_r| ty::Region::new_var(tcx, u_r))
// In the case of a failure, use `ReErased`. We will eventually
// return `None` in this case.
.unwrap_or(tcx.lifetimes.re_erased)
// In case we could not find a named region to map to,
// we will return `None` below.
.unwrap_or_else(|| {
failed = true;
r
})
});

debug!("try_promote_type_test_subject: folded ty = {:?}", ty);

// This will be true if we failed to promote some region.
if ty.has_erased_regions() {
if failed {
return None;
}

@@ -1,101 +1,18 @@
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::fx::FxIndexSet;
use rustc_index::bit_set::SparseBitMatrix;
use rustc_index::interval::IntervalSet;
use rustc_index::interval::SparseIntervalMatrix;
use rustc_index::Idx;
use rustc_index::IndexVec;
use rustc_middle::mir::{BasicBlock, Body, Location};
use rustc_middle::mir::{BasicBlock, Location};
use rustc_middle::ty::{self, RegionVid};
use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex};
use std::fmt::Debug;
use std::rc::Rc;

use crate::dataflow::BorrowIndex;

/// Maps between a `Location` and a `PointIndex` (and vice versa).
pub(crate) struct RegionValueElements {
/// For each basic block, how many points are contained within?
statements_before_block: IndexVec<BasicBlock, usize>,

/// Map backward from each point to the basic block that it
/// belongs to.
basic_blocks: IndexVec<PointIndex, BasicBlock>,

num_points: usize,
}

impl RegionValueElements {
pub(crate) fn new(body: &Body<'_>) -> Self {
let mut num_points = 0;
let statements_before_block: IndexVec<BasicBlock, usize> = body
.basic_blocks
.iter()
.map(|block_data| {
let v = num_points;
num_points += block_data.statements.len() + 1;
v
})
.collect();
debug!("RegionValueElements: statements_before_block={:#?}", statements_before_block);
debug!("RegionValueElements: num_points={:#?}", num_points);

let mut basic_blocks = IndexVec::with_capacity(num_points);
for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
basic_blocks.extend((0..=bb_data.statements.len()).map(|_| bb));
}

Self { statements_before_block, basic_blocks, num_points }
}

/// Total number of point indices
pub(crate) fn num_points(&self) -> usize {
self.num_points
}

/// Converts a `Location` into a `PointIndex`. O(1).
pub(crate) fn point_from_location(&self, location: Location) -> PointIndex {
let Location { block, statement_index } = location;
let start_index = self.statements_before_block[block];
PointIndex::new(start_index + statement_index)
}

/// Converts a `Location` into a `PointIndex`. O(1).
pub(crate) fn entry_point(&self, block: BasicBlock) -> PointIndex {
let start_index = self.statements_before_block[block];
PointIndex::new(start_index)
}

/// Return the PointIndex for the block start of this index.
pub(crate) fn to_block_start(&self, index: PointIndex) -> PointIndex {
PointIndex::new(self.statements_before_block[self.basic_blocks[index]])
}

/// Converts a `PointIndex` back to a location. O(1).
pub(crate) fn to_location(&self, index: PointIndex) -> Location {
assert!(index.index() < self.num_points);
let block = self.basic_blocks[index];
let start_index = self.statements_before_block[block];
let statement_index = index.index() - start_index;
Location { block, statement_index }
}

/// Sometimes we get point-indices back from bitsets that may be
/// out of range (because they round up to the nearest 2^N number
/// of bits). Use this function to filter such points out if you
/// like.
pub(crate) fn point_in_range(&self, index: PointIndex) -> bool {
index.index() < self.num_points
}
}

rustc_index::newtype_index! {
/// A single integer representing a `Location` in the MIR control-flow
/// graph. Constructed efficiently from `RegionValueElements`.
#[orderable]
#[debug_format = "PointIndex({})"]
pub struct PointIndex {}
}
use crate::BorrowIndex;

rustc_index::newtype_index! {
/// A single integer representing a `ty::Placeholder`.
@@ -123,10 +40,17 @@ pub(crate) enum RegionElement {
/// an interval matrix storing liveness ranges for each region-vid.
pub(crate) struct LivenessValues {
/// The map from locations to points.
elements: Rc<RegionValueElements>,
elements: Rc<DenseLocationMap>,

/// Which regions are live. This is exclusive with the fine-grained tracking in `points`, and
/// currently only used for validating promoteds (which don't care about more precise tracking).
live_regions: Option<FxHashSet<RegionVid>>,

/// For each region: the points where it is live.
points: SparseIntervalMatrix<RegionVid, PointIndex>,
///
/// This is not initialized for promoteds, because we don't care *where* within a promoted a
/// region is live, only that it is.
points: Option<SparseIntervalMatrix<RegionVid, PointIndex>>,

/// When using `-Zpolonius=next`, for each point: the loans flowing into the live regions at
/// that point.
@@ -155,24 +79,52 @@ impl LiveLoans {

impl LivenessValues {
/// Create an empty map of regions to locations where they're live.
pub(crate) fn new(elements: Rc<RegionValueElements>) -> Self {
pub(crate) fn with_specific_points(elements: Rc<DenseLocationMap>) -> Self {
LivenessValues {
points: SparseIntervalMatrix::new(elements.num_points),
live_regions: None,
points: Some(SparseIntervalMatrix::new(elements.num_points())),
elements,
loans: None,
}
}

/// Create an empty map of regions to locations where they're live.
///
/// Unlike `with_specific_points`, does not track exact locations where something is live, only
/// which regions are live.
pub(crate) fn without_specific_points(elements: Rc<DenseLocationMap>) -> Self {
LivenessValues {
live_regions: Some(Default::default()),
points: None,
elements,
loans: None,
}
}

/// Iterate through each region that has a value in this set.
pub(crate) fn regions(&self) -> impl Iterator<Item = RegionVid> {
self.points.rows()
pub(crate) fn regions(&self) -> impl Iterator<Item = RegionVid> + '_ {
self.points.as_ref().expect("use with_specific_points").rows()
}

/// Iterate through each region that has a value in this set.
// We are passing query instability implications to the caller.
#[rustc_lint_query_instability]
#[allow(rustc::potential_query_instability)]
pub(crate) fn live_regions_unordered(&self) -> impl Iterator<Item = RegionVid> + '_ {
self.live_regions.as_ref().unwrap().iter().copied()
}

/// Records `region` as being live at the given `location`.
pub(crate) fn add_location(&mut self, region: RegionVid, location: Location) {
debug!("LivenessValues::add_location(region={:?}, location={:?})", region, location);
let point = self.elements.point_from_location(location);
self.points.insert(region, point);
debug!("LivenessValues::add_location(region={:?}, location={:?})", region, location);
if let Some(points) = &mut self.points {
points.insert(region, point);
} else {
if self.elements.point_in_range(point) {
self.live_regions.as_mut().unwrap().insert(region);
}
}

// When available, record the loans flowing into this region as live at the given point.
if let Some(loans) = self.loans.as_mut() {
@@ -185,7 +137,13 @@ impl LivenessValues {
/// Records `region` as being live at all the given `points`.
pub(crate) fn add_points(&mut self, region: RegionVid, points: &IntervalSet<PointIndex>) {
debug!("LivenessValues::add_points(region={:?}, points={:?})", region, points);
self.points.union_row(region, points);
if let Some(this) = &mut self.points {
this.union_row(region, points);
} else {
if points.iter().any(|point| self.elements.point_in_range(point)) {
self.live_regions.as_mut().unwrap().insert(region);
}
}

// When available, record the loans flowing into this region as live at the given points.
if let Some(loans) = self.loans.as_mut() {
@@ -201,23 +159,33 @@ impl LivenessValues {

/// Records `region` as being live at all the control-flow points.
pub(crate) fn add_all_points(&mut self, region: RegionVid) {
self.points.insert_all_into_row(region);
if let Some(points) = &mut self.points {
points.insert_all_into_row(region);
} else {
self.live_regions.as_mut().unwrap().insert(region);
}
}

/// Returns whether `region` is marked live at the given `location`.
pub(crate) fn is_live_at(&self, region: RegionVid, location: Location) -> bool {
let point = self.elements.point_from_location(location);
self.points.row(region).is_some_and(|r| r.contains(point))
}

/// Returns whether `region` is marked live at any location.
pub(crate) fn is_live_anywhere(&self, region: RegionVid) -> bool {
self.live_points(region).next().is_some()
if let Some(points) = &self.points {
points.row(region).is_some_and(|r| r.contains(point))
} else {
unreachable!(
"Should be using LivenessValues::with_specific_points to ask whether live at a location"
)
}
}

/// Returns an iterator of all the points where `region` is live.
fn live_points(&self, region: RegionVid) -> impl Iterator<Item = PointIndex> + '_ {
self.points
let Some(points) = &self.points else {
unreachable!(
"Should be using LivenessValues::with_specific_points to ask whether live at a location"
)
};
points
.row(region)
.into_iter()
.flat_map(|set| set.iter())
@@ -298,7 +266,7 @@ impl PlaceholderIndices {

/// it would also contain various points from within the function.
#[derive(Clone)]
pub(crate) struct RegionValues<N: Idx> {
elements: Rc<RegionValueElements>,
elements: Rc<DenseLocationMap>,
placeholder_indices: Rc<PlaceholderIndices>,
points: SparseIntervalMatrix<N, PointIndex>,
free_regions: SparseBitMatrix<N, RegionVid>,
@@ -313,14 +281,14 @@ impl<N: Idx> RegionValues<N> {
/// Each of the regions in num_region_variables will be initialized with an
/// empty set of points and no causal information.
pub(crate) fn new(
elements: &Rc<RegionValueElements>,
elements: &Rc<DenseLocationMap>,
num_universal_regions: usize,
placeholder_indices: &Rc<PlaceholderIndices>,
) -> Self {
let num_placeholders = placeholder_indices.len();
Self {
elements: elements.clone(),
points: SparseIntervalMatrix::new(elements.num_points),
points: SparseIntervalMatrix::new(elements.num_points()),
placeholder_indices: placeholder_indices.clone(),
free_regions: SparseBitMatrix::new(num_universal_regions),
placeholders: SparseBitMatrix::new(num_placeholders),
@@ -372,7 +340,10 @@ impl<N: Idx> RegionValues<N> {
/// elements for the region `from` from `values` and add them to
/// the region `to` in `self`.
pub(crate) fn merge_liveness(&mut self, to: N, from: RegionVid, values: &LivenessValues) {
if let Some(set) = values.points.row(from) {
let Some(value_points) = &values.points else {
panic!("LivenessValues must track specific points for use in merge_liveness");
};
if let Some(set) = value_points.row(from) {
self.points.union_row(to, set);
}
}
@@ -486,7 +457,7 @@ impl ToElementIndex for ty::PlaceholderRegion {

/// For debugging purposes, returns a pretty-printed string of the given points.
pub(crate) fn pretty_print_points(
elements: &RegionValueElements,
elements: &DenseLocationMap,
points: impl IntoIterator<Item = PointIndex>,
) -> String {
pretty_print_region_elements(
@@ -1,5 +1,6 @@
use rustc_data_structures::frozen::Frozen;
use rustc_data_structures::transitive_relation::{TransitiveRelation, TransitiveRelationBuilder};
use rustc_hir::def::DefKind;
use rustc_infer::infer::canonical::QueryRegionConstraints;
use rustc_infer::infer::outlives;
use rustc_infer::infer::outlives::env::RegionBoundPairs;

@@ -195,7 +196,9 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {

#[instrument(level = "debug", skip(self))]
pub(crate) fn create(mut self) -> CreateResult<'tcx> {
let span = self.infcx.tcx.def_span(self.universal_regions.defining_ty.def_id());
let tcx = self.infcx.tcx;
let defining_ty_def_id = self.universal_regions.defining_ty.def_id().expect_local();
let span = tcx.def_span(defining_ty_def_id);

// Insert the facts we know from the predicates. Why? Why not.
let param_env = self.param_env;

@@ -275,6 +278,26 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
normalized_inputs_and_output.push(norm_ty);
}

// Add implied bounds from impl header.
if matches!(tcx.def_kind(defining_ty_def_id), DefKind::AssocFn | DefKind::AssocConst) {
for &(ty, _) in tcx.assumed_wf_types(tcx.local_parent(defining_ty_def_id)) {
let Ok(TypeOpOutput { output: norm_ty, constraints: c, .. }) = self
.param_env
.and(type_op::normalize::Normalize::new(ty))
.fully_perform(self.infcx, span)
else {
tcx.dcx().span_delayed_bug(span, format!("failed to normalize {ty:?}"));
continue;
};
constraints.extend(c);

// We currently add implied bounds from the normalized ty only.
// This is more conservative and matches wfcheck behavior.
let c = self.add_implied_bounds(norm_ty);
constraints.extend(c);
}
}

for c in constraints {
self.push_region_constraints(c, span);
}
@@ -7,6 +7,7 @@
//! `RETURN_PLACE` the MIR arguments) are always fully normalized (and
//! contain revealed `impl Trait` values).

use itertools::Itertools;
use rustc_infer::infer::BoundRegionConversionTime;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty};

@@ -39,9 +40,15 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
user_provided_sig,
);

for (&user_ty, arg_decl) in user_provided_sig.inputs().iter().zip(
// In MIR, closure args begin with an implicit `self`. Skip it!
body.args_iter().skip(1).map(|local| &body.local_decls[local]),
let is_coroutine_with_implicit_resume_ty = self.tcx().is_coroutine(mir_def_id.to_def_id())
&& user_provided_sig.inputs().is_empty();

for (&user_ty, arg_decl) in user_provided_sig.inputs().iter().zip_eq(
// In MIR, closure args begin with an implicit `self`.
// Also, coroutines have a resume type which may be implicitly `()`.
body.args_iter()
.skip(1 + if is_coroutine_with_implicit_resume_ty { 1 } else { 0 })
.map(|local| &body.local_decls[local]),
) {
self.ascribe_user_type_skip_wf(
arg_decl.ty,
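The rewritten loop above relies on itertools' `zip_eq`, which panics on a length mismatch instead of silently truncating like `Iterator::zip`, so a miscounted skip of the implicit coroutine resume argument fails loudly rather than quietly. A small illustration of that difference (assumes the `itertools` crate; not borrowck code):

```rust
use itertools::Itertools;

fn main() {
    let tys = ["u8", "u16"];
    let decls = ["arg1", "arg2", "arg3"];

    // `zip` silently stops at the shorter side, so a length mismatch goes unnoticed.
    assert_eq!(tys.iter().zip(decls.iter()).count(), 2);

    // `zip_eq` panics on mismatched lengths, turning the same bug into a loud failure.
    std::panic::set_hook(Box::new(|_| {})); // keep the expected panic quiet
    let result = std::panic::catch_unwind(|| tys.iter().zip_eq(decls.iter()).count());
    assert!(result.is_err());
}
```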
@@ -2,9 +2,9 @@ use rustc_data_structures::vec_linked_list as vll;
use rustc_index::IndexVec;
use rustc_middle::mir::visit::{PlaceContext, Visitor};
use rustc_middle::mir::{Body, Local, Location};
use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex};

use crate::def_use::{self, DefUse};
use crate::region_infer::values::{PointIndex, RegionValueElements};

/// A map that cross references each local with the locations where it
/// is defined (assigned), used, or dropped. Used during liveness

@@ -60,7 +60,7 @@ impl vll::LinkElem for Appearance {
impl LocalUseMap {
pub(crate) fn build(
live_locals: &[Local],
elements: &RegionValueElements,
elements: &DenseLocationMap,
body: &Body<'_>,
) -> Self {
let nones = IndexVec::from_elem(None, &body.local_decls);

@@ -103,7 +103,7 @@ impl LocalUseMap {

struct LocalUseMapBuild<'me> {
local_use_map: &'me mut LocalUseMap,
elements: &'me RegionValueElements,
elements: &'me DenseLocationMap,

// Vector used in `visit_local` to signal which `Local`s do we need
// def/use/drop information on, constructed from `live_locals` (that

@@ -144,7 +144,7 @@ impl LocalUseMapBuild<'_> {
}

fn insert(
elements: &RegionValueElements,
elements: &DenseLocationMap,
first_appearance: &mut Option<AppearanceIndex>,
appearances: &mut IndexVec<AppearanceIndex, Appearance>,
location: Location,
@@ -6,6 +6,7 @@ use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{GenericArgsRef, Region, RegionVid, Ty, TyCtxt};
use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
use rustc_mir_dataflow::move_paths::MoveData;
use rustc_mir_dataflow::points::DenseLocationMap;
use rustc_mir_dataflow::ResultsCursor;
use std::rc::Rc;

@@ -13,7 +14,7 @@ use crate::{
constraints::OutlivesConstraintSet,
facts::{AllFacts, AllFactsExt},
location::LocationTable,
region_infer::values::{LivenessValues, RegionValueElements},
region_infer::values::LivenessValues,
universal_regions::UniversalRegions,
};

@@ -34,7 +35,7 @@ mod trace;
pub(super) fn generate<'mir, 'tcx>(
typeck: &mut TypeChecker<'_, 'tcx>,
body: &Body<'tcx>,
elements: &Rc<RegionValueElements>,
elements: &Rc<DenseLocationMap>,
flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
move_data: &MoveData<'tcx>,
location_table: &LocationTable,
@@ -7,6 +7,7 @@ use rustc_infer::infer::outlives::for_liveness;
use rustc_middle::mir::{BasicBlock, Body, ConstraintCategory, Local, Location};
use rustc_middle::traits::query::DropckOutlivesResult;
use rustc_middle::ty::{Ty, TyCtxt, TypeVisitable, TypeVisitableExt};
use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex};
use rustc_span::DUMMY_SP;
use rustc_trait_selection::traits::query::type_op::outlives::DropckOutlives;
use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput};

@@ -17,7 +18,7 @@ use rustc_mir_dataflow::move_paths::{HasMoveData, MoveData, MovePathIndex};
use rustc_mir_dataflow::ResultsCursor;

use crate::{
region_infer::values::{self, LiveLoans, PointIndex, RegionValueElements},
region_infer::values::{self, LiveLoans},
type_check::liveness::local_use_map::LocalUseMap,
type_check::liveness::polonius,
type_check::NormalizeLocation,

@@ -41,7 +42,7 @@ use crate::{
pub(super) fn trace<'mir, 'tcx>(
typeck: &mut TypeChecker<'_, 'tcx>,
body: &Body<'tcx>,
elements: &Rc<RegionValueElements>,
elements: &Rc<DenseLocationMap>,
flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
move_data: &MoveData<'tcx>,
relevant_live_locals: Vec<Local>,

@@ -105,7 +106,7 @@ struct LivenessContext<'me, 'typeck, 'flow, 'tcx> {
typeck: &'me mut TypeChecker<'typeck, 'tcx>,

/// Defines the `PointIndex` mapping
elements: &'me RegionValueElements,
elements: &'me DenseLocationMap,

/// MIR we are analyzing.
body: &'me Body<'tcx>,

@@ -570,7 +571,7 @@ impl<'tcx> LivenessContext<'_, '_, '_, 'tcx> {
}

fn make_all_regions_live(
elements: &RegionValueElements,
elements: &DenseLocationMap,
typeck: &mut TypeChecker<'_, 'tcx>,
value: impl TypeVisitable<TyCtxt<'tcx>>,
live_at: &IntervalSet<PointIndex>,
@ -35,7 +35,9 @@ use rustc_middle::ty::{
|
|||
OpaqueHiddenType, OpaqueTypeKey, RegionVid, Ty, TyCtxt, UserType, UserTypeAnnotationIndex,
|
||||
};
|
||||
use rustc_middle::ty::{GenericArgsRef, UserArgs};
|
||||
use rustc_mir_dataflow::points::DenseLocationMap;
|
||||
use rustc_span::def_id::CRATE_DEF_ID;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
use rustc_target::abi::{FieldIdx, FIRST_VARIANT};
|
||||
|
@ -58,9 +60,7 @@ use crate::{
|
|||
location::LocationTable,
|
||||
member_constraints::MemberConstraintSet,
|
||||
path_utils,
|
||||
region_infer::values::{
|
||||
LivenessValues, PlaceholderIndex, PlaceholderIndices, RegionValueElements,
|
||||
},
|
||||
region_infer::values::{LivenessValues, PlaceholderIndex, PlaceholderIndices},
|
||||
region_infer::TypeTest,
|
||||
type_check::free_region_relations::{CreateResult, UniversalRegionRelations},
|
||||
universal_regions::{DefiningTy, UniversalRegions},
|
||||
|
@ -133,7 +133,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
|||
all_facts: &mut Option<AllFacts>,
|
||||
flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
|
||||
move_data: &MoveData<'tcx>,
|
||||
elements: &Rc<RegionValueElements>,
|
||||
elements: &Rc<DenseLocationMap>,
|
||||
upvars: &[&ty::CapturedPlace<'tcx>],
|
||||
use_polonius: bool,
|
||||
) -> MirTypeckResults<'tcx> {
|
||||
|
@ -141,7 +141,7 @@ pub(crate) fn type_check<'mir, 'tcx>(
|
|||
let mut constraints = MirTypeckRegionConstraints {
|
||||
placeholder_indices: PlaceholderIndices::default(),
|
||||
placeholder_index_to_region: IndexVec::default(),
|
||||
liveness_constraints: LivenessValues::new(elements.clone()),
|
||||
liveness_constraints: LivenessValues::with_specific_points(elements.clone()),
|
||||
outlives_constraints: OutlivesConstraintSet::default(),
|
||||
member_constraints: MemberConstraintSet::default(),
|
||||
type_tests: Vec::default(),
|
||||
|
@ -406,6 +406,16 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
|
|||
instantiated_predicates,
|
||||
locations,
|
||||
);
|
||||
|
||||
assert!(!matches!(
|
||||
tcx.impl_of_method(def_id).map(|imp| tcx.def_kind(imp)),
|
||||
Some(DefKind::Impl { of_trait: true })
|
||||
));
|
||||
self.cx.prove_predicates(
|
||||
args.types().map(|ty| ty::ClauseKind::WellFormed(ty.into())),
|
||||
locations,
|
||||
ConstraintCategory::Boring,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -545,7 +555,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
let all_facts = &mut None;
|
||||
let mut constraints = Default::default();
|
||||
let mut liveness_constraints =
|
||||
LivenessValues::new(Rc::new(RegionValueElements::new(promoted_body)));
|
||||
LivenessValues::without_specific_points(Rc::new(DenseLocationMap::new(promoted_body)));
|
||||
// Don't try to add borrow_region facts for the promoted MIR
|
||||
|
||||
let mut swap_constraints = |this: &mut Self| {
|
||||
|
@ -584,17 +594,19 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
|
|||
}
|
||||
self.cx.borrowck_context.constraints.outlives_constraints.push(constraint)
|
||||
}
|
||||
for region in liveness_constraints.regions() {
|
||||
// If the region is live at at least one location in the promoted MIR,
|
||||
// then add a liveness constraint to the main MIR for this region
|
||||
// at the location provided as an argument to this method
|
||||
if liveness_constraints.is_live_anywhere(region) {
|
||||
self.cx
|
||||
.borrowck_context
|
||||
.constraints
|
||||
.liveness_constraints
|
||||
.add_location(region, location);
|
||||
}
|
||||
// If the region is live at at least one location in the promoted MIR,
|
||||
// then add a liveness constraint to the main MIR for this region
|
||||
// at the location provided as an argument to this method
|
||||
//
|
||||
// add_location doesn't care about ordering so not a problem for the live regions to be
|
||||
// unordered.
|
||||
#[allow(rustc::potential_query_instability)]
|
||||
for region in liveness_constraints.live_regions_unordered() {
|
||||
self.cx
|
||||
.borrowck_context
|
||||
.constraints
|
||||
.liveness_constraints
|
||||
.add_location(region, location);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1359,7 +1371,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
TerminatorKind::Call { func, args, destination, call_source, target, .. } => {
|
||||
self.check_operand(func, term_location);
|
||||
for arg in args {
|
||||
self.check_operand(arg, term_location);
|
||||
self.check_operand(&arg.node, term_location);
|
||||
}
|
||||
|
||||
let func_ty = func.ty(body, tcx);
|
||||
|
@ -1580,7 +1592,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
term: &Terminator<'tcx>,
|
||||
func: &Operand<'tcx>,
|
||||
sig: &ty::FnSig<'tcx>,
|
||||
args: &[Operand<'tcx>],
|
||||
args: &[Spanned<Operand<'tcx>>],
|
||||
term_location: Location,
|
||||
call_source: CallSource,
|
||||
) {
|
||||
|
@ -1593,7 +1605,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
if self.tcx().is_intrinsic(def_id) {
|
||||
match self.tcx().item_name(def_id) {
|
||||
sym::simd_shuffle => {
|
||||
if !matches!(args[2], Operand::Constant(_)) {
|
||||
if !matches!(args[2], Spanned { node: Operand::Constant(_), .. }) {
|
||||
self.tcx()
|
||||
.dcx()
|
||||
.emit_err(SimdShuffleLastConst { span: term.source_info.span });
|
||||
|
@ -1606,7 +1618,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
|
|||
debug!(?func_ty);
|
||||
|
||||
for (n, (fn_arg, op_arg)) in iter::zip(sig.inputs(), args).enumerate() {
|
||||
let op_arg_ty = op_arg.ty(body, self.tcx());
|
||||
let op_arg_ty = op_arg.node.ty(body, self.tcx());
|
||||
|
||||
let op_arg_ty = self.normalize(op_arg_ty, term_location);
|
||||
let category = if call_source.from_hir_call() {
|
||||
|
|
|
@ -668,7 +668,11 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
|
|||
kind: ty::BrEnv,
|
||||
};
|
||||
let env_region = ty::Region::new_bound(tcx, ty::INNERMOST, br);
|
||||
let closure_ty = tcx.closure_env_ty(def_id, args, env_region).unwrap();
|
||||
let closure_ty = tcx.closure_env_ty(
|
||||
Ty::new_closure(tcx, def_id, args),
|
||||
args.as_closure().kind(),
|
||||
env_region,
|
||||
);
|
||||
|
||||
// The "inputs" of the closure in the
|
||||
// signature appear as a tuple. The MIR side
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
#![doc(rust_logo)]
|
||||
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
|
||||
#![feature(array_windows)]
|
||||
#![feature(assert_matches)]
|
||||
#![feature(box_patterns)]
|
||||
#![feature(decl_macro)]
|
||||
#![feature(if_let_guard)]
|
||||
|
|
|
@@ -9,6 +9,7 @@ use rustc_errors::{Applicability, DiagnosticBuilder, Level};
use rustc_expand::base::*;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::{ErrorGuaranteed, FileNameDisplayPreference, Span};
use std::assert_matches::assert_matches;
use std::iter;
use thin_vec::{thin_vec, ThinVec};

@@ -182,6 +183,16 @@ pub fn expand_test_or_bench(
// creates $name: $expr
let field = |name, expr| cx.field_imm(sp, Ident::from_str_and_span(name, sp), expr);

// Adds `#[coverage(off)]` to a closure, so it won't be instrumented in
// `-Cinstrument-coverage` builds.
// This requires `#[allow_internal_unstable(coverage_attribute)]` on the
// corresponding macro declaration in `core::macros`.
let coverage_off = |mut expr: P<ast::Expr>| {
assert_matches!(expr.kind, ast::ExprKind::Closure(_));
expr.attrs.push(cx.attr_nested_word(sym::coverage, sym::off, sp));
expr
};

let test_fn = if is_bench {
// A simple ident for a lambda
let b = Ident::from_str_and_span("b", attr_sp);

@@ -190,8 +201,9 @@ pub fn expand_test_or_bench(
sp,
cx.expr_path(test_path("StaticBenchFn")),
thin_vec![
// #[coverage(off)]
// |b| self::test::assert_test_result(
cx.lambda1(
coverage_off(cx.lambda1(
sp,
cx.expr_call(
sp,

@@ -206,7 +218,7 @@ pub fn expand_test_or_bench(
],
),
b,
), // )
)), // )
],
)
} else {

@@ -214,8 +226,9 @@ pub fn expand_test_or_bench(
sp,
cx.expr_path(test_path("StaticTestFn")),
thin_vec![
// #[coverage(off)]
// || {
cx.lambda0(
coverage_off(cx.lambda0(
sp,
// test::assert_test_result(
cx.expr_call(

@@ -230,7 +243,7 @@ pub fn expand_test_or_bench(
), // )
],
), // }
), // )
)), // )
],
)
};
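The builder code above wraps the synthesized test and bench closures in `#[coverage(off)]` so the harness glue stops showing up in `-C instrument-coverage` reports. A rough, nightly-only sketch of what that attribute means at the user level (the real expansion reaches it through `allow_internal_unstable(coverage_attribute)` rather than an explicit feature gate):

```rust
// Nightly-only sketch: user code needs the `coverage_attribute` feature, whereas
// the expanded test harness gets it via `allow_internal_unstable`.
#![feature(coverage_attribute)]

#[coverage(off)]
fn harness_glue() -> i32 {
    // No coverage counters are emitted for this body under -C instrument-coverage.
    1 + 1
}

fn main() {
    assert_eq!(harness_glue(), 2);
}
```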
@ -246,12 +246,12 @@ checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b"
|
|||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.7.4"
|
||||
version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
|
||||
checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"winapi",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
|
@ -19,7 +19,7 @@ gimli = { version = "0.28", default-features = false, features = ["write"]}
|
|||
object = { version = "0.32", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] }
|
||||
|
||||
indexmap = "2.0.0"
|
||||
libloading = { version = "0.7.3", optional = true }
|
||||
libloading = { version = "0.8.0", optional = true }
|
||||
smallvec = "1.8.1"
|
||||
|
||||
[patch.crates-io]
|
||||
|
|
|
@ -11,6 +11,7 @@ use cranelift_module::ModuleError;
|
|||
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
||||
use rustc_middle::ty::layout::FnAbiOf;
|
||||
use rustc_session::Session;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_target::abi::call::{Conv, FnAbi};
|
||||
use rustc_target::spec::abi::Abi;
|
||||
|
||||
|
@ -360,7 +361,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
source_info: mir::SourceInfo,
|
||||
func: &Operand<'tcx>,
|
||||
args: &[Operand<'tcx>],
|
||||
args: &[Spanned<Operand<'tcx>>],
|
||||
destination: Place<'tcx>,
|
||||
target: Option<BasicBlock>,
|
||||
) {
|
||||
|
@ -415,7 +416,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
|
||||
let extra_args = &args[fn_sig.inputs().skip_binder().len()..];
|
||||
let extra_args = fx.tcx.mk_type_list_from_iter(
|
||||
extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx))),
|
||||
extra_args.iter().map(|op_arg| fx.monomorphize(op_arg.node.ty(fx.mir, fx.tcx))),
|
||||
);
|
||||
let fn_abi = if let Some(instance) = instance {
|
||||
RevealAllLayoutCx(fx.tcx).fn_abi_of_instance(instance, extra_args)
|
||||
|
@ -440,10 +441,10 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
// Unpack arguments tuple for closures
|
||||
let mut args = if fn_sig.abi() == Abi::RustCall {
|
||||
let (self_arg, pack_arg) = match args {
|
||||
[pack_arg] => (None, codegen_call_argument_operand(fx, pack_arg)),
|
||||
[pack_arg] => (None, codegen_call_argument_operand(fx, &pack_arg.node)),
|
||||
[self_arg, pack_arg] => (
|
||||
Some(codegen_call_argument_operand(fx, self_arg)),
|
||||
codegen_call_argument_operand(fx, pack_arg),
|
||||
Some(codegen_call_argument_operand(fx, &self_arg.node)),
|
||||
codegen_call_argument_operand(fx, &pack_arg.node),
|
||||
),
|
||||
_ => panic!("rust-call abi requires one or two arguments"),
|
||||
};
|
||||
|
@ -463,7 +464,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
|
|||
}
|
||||
args
|
||||
} else {
|
||||
args.iter().map(|arg| codegen_call_argument_operand(fx, arg)).collect::<Vec<_>>()
|
||||
args.iter().map(|arg| codegen_call_argument_operand(fx, &arg.node)).collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
// Pass the caller location for `#[track_caller]`.
|
||||
|
|
|
@ -7,7 +7,7 @@ pub(crate) fn codegen_llvm_intrinsic_call<'tcx>(
|
|||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
intrinsic: &str,
|
||||
generic_args: GenericArgsRef<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
ret: CPlace<'tcx>,
|
||||
target: Option<BasicBlock>,
|
||||
span: Span,
|
||||
|
|
|
@ -7,7 +7,7 @@ pub(crate) fn codegen_aarch64_llvm_intrinsic_call<'tcx>(
|
|||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
intrinsic: &str,
|
||||
_args: GenericArgsRef<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
ret: CPlace<'tcx>,
|
||||
target: Option<BasicBlock>,
|
||||
) {
|
||||
|
|
|
@ -11,7 +11,7 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
intrinsic: &str,
|
||||
_args: GenericArgsRef<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
ret: CPlace<'tcx>,
|
||||
target: Option<BasicBlock>,
|
||||
span: Span,
|
||||
|
@ -175,9 +175,9 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
[x, y, kind] => (x, y, kind),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let x = codegen_operand(fx, x);
|
||||
let y = codegen_operand(fx, y);
|
||||
let kind = match kind {
|
||||
let x = codegen_operand(fx, &x.node);
|
||||
let y = codegen_operand(fx, &y.node);
|
||||
let kind = match &kind.node {
|
||||
Operand::Constant(const_) => crate::constant::eval_mir_constant(fx, const_).0,
|
||||
Operand::Copy(_) | Operand::Move(_) => unreachable!("{kind:?}"),
|
||||
};
|
||||
|
@ -287,8 +287,8 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
[a, b] => (a, b),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let b = codegen_operand(fx, b);
|
||||
let a = codegen_operand(fx, &a.node);
|
||||
let b = codegen_operand(fx, &b.node);
|
||||
|
||||
// Based on the pseudocode at https://github.com/rust-lang/stdarch/blob/1cfbca8b38fd9b4282b2f054f61c6ca69fc7ce29/crates/core_arch/src/x86/avx2.rs#L2319-L2332
|
||||
let zero = fx.bcx.ins().iconst(types::I8, 0);
|
||||
|
@ -325,9 +325,9 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
[a, b, imm8] => (a, b, imm8),
|
||||
_ => bug!("wrong number of args for intrinsic {intrinsic}"),
|
||||
};
|
||||
let a = codegen_operand(fx, a);
|
||||
let b = codegen_operand(fx, b);
|
||||
let imm8 = codegen_operand(fx, imm8).load_scalar(fx);
|
||||
let a = codegen_operand(fx, &a.node);
|
||||
let b = codegen_operand(fx, &b.node);
|
||||
let imm8 = codegen_operand(fx, &imm8.node).load_scalar(fx);
|
||||
|
||||
let a_low = a.value_typed_lane(fx, fx.tcx.types.u128, 0).load_scalar(fx);
|
||||
let a_high = a.value_typed_lane(fx, fx.tcx.types.u128, 1).load_scalar(fx);
|
||||
|
@ -956,14 +956,14 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
let b = b.load_scalar(fx);
|
||||
let lb = lb.load_scalar(fx);
|
||||
|
||||
let imm8 = if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[4])
|
||||
{
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx
|
||||
.dcx()
|
||||
.span_fatal(span, "Index argument for `_mm_cmpestri` is not a constant");
|
||||
};
|
||||
let imm8 =
|
||||
if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[4].node) {
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx
|
||||
.dcx()
|
||||
.span_fatal(span, "Index argument for `_mm_cmpestri` is not a constant");
|
||||
};
|
||||
|
||||
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
|
||||
|
||||
|
@ -1009,14 +1009,14 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
let b = b.load_scalar(fx);
|
||||
let lb = lb.load_scalar(fx);
|
||||
|
||||
let imm8 = if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[4])
|
||||
{
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx
|
||||
.dcx()
|
||||
.span_fatal(span, "Index argument for `_mm_cmpestrm` is not a constant");
|
||||
};
|
||||
let imm8 =
|
||||
if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[4].node) {
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx
|
||||
.dcx()
|
||||
.span_fatal(span, "Index argument for `_mm_cmpestrm` is not a constant");
|
||||
};
|
||||
|
||||
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
|
||||
|
||||
|
@ -1056,15 +1056,15 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
let a = a.load_scalar(fx);
|
||||
let b = b.load_scalar(fx);
|
||||
|
||||
let imm8 = if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[2])
|
||||
{
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx.dcx().span_fatal(
|
||||
span,
|
||||
"Index argument for `_mm_clmulepi64_si128` is not a constant",
|
||||
);
|
||||
};
|
||||
let imm8 =
|
||||
if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[2].node) {
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx.dcx().span_fatal(
|
||||
span,
|
||||
"Index argument for `_mm_clmulepi64_si128` is not a constant",
|
||||
);
|
||||
};
|
||||
|
||||
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
|
||||
|
||||
|
@ -1093,15 +1093,15 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
|
|||
|
||||
let a = a.load_scalar(fx);
|
||||
|
||||
let imm8 = if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[1])
|
||||
{
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx.dcx().span_fatal(
|
||||
span,
|
||||
"Index argument for `_mm_aeskeygenassist_si128` is not a constant",
|
||||
);
|
||||
};
|
||||
let imm8 =
|
||||
if let Some(imm8) = crate::constant::mir_operand_get_const_val(fx, &args[1].node) {
|
||||
imm8
|
||||
} else {
|
||||
fx.tcx.dcx().span_fatal(
|
||||
span,
|
||||
"Index argument for `_mm_aeskeygenassist_si128` is not a constant",
|
||||
);
|
||||
};
|
||||
|
||||
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ macro_rules! intrinsic_args {
|
|||
($fx:expr, $args:expr => ($($arg:tt),*); $intrinsic:expr) => {
|
||||
#[allow(unused_parens)]
|
||||
let ($($arg),*) = if let [$($arg),*] = $args {
|
||||
($(codegen_operand($fx, $arg)),*)
|
||||
($(codegen_operand($fx, &($arg).node)),*)
|
||||
} else {
|
||||
$crate::intrinsics::bug_on_incorrect_arg_count($intrinsic);
|
||||
};
|
||||
|
@ -22,6 +22,7 @@ use rustc_middle::ty;
|
|||
use rustc_middle::ty::layout::{HasParamEnv, ValidityRequirement};
|
||||
use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
|
||||
use rustc_middle::ty::GenericArgsRef;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::{kw, sym, Symbol};
|
||||
|
||||
pub(crate) use self::llvm::codegen_llvm_intrinsic_call;
|
||||
|
@ -263,7 +264,7 @@ fn bool_to_zero_or_max_uint<'tcx>(
|
|||
pub(crate) fn codegen_intrinsic_call<'tcx>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
instance: Instance<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
destination: CPlace<'tcx>,
|
||||
target: Option<BasicBlock>,
|
||||
source_info: mir::SourceInfo,
|
||||
|
@ -301,7 +302,7 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
|
|||
fn codegen_float_intrinsic_call<'tcx>(
|
||||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
intrinsic: Symbol,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
ret: CPlace<'tcx>,
|
||||
) -> bool {
|
||||
let (name, arg_count, ty, clif_ty) = match intrinsic {
|
||||
|
@ -353,18 +354,21 @@ fn codegen_float_intrinsic_call<'tcx>(
|
|||
let (a, b, c);
|
||||
let args = match args {
|
||||
[x] => {
|
||||
a = [codegen_operand(fx, x).load_scalar(fx)];
|
||||
a = [codegen_operand(fx, &x.node).load_scalar(fx)];
|
||||
&a as &[_]
|
||||
}
|
||||
[x, y] => {
|
||||
b = [codegen_operand(fx, x).load_scalar(fx), codegen_operand(fx, y).load_scalar(fx)];
|
||||
b = [
|
||||
codegen_operand(fx, &x.node).load_scalar(fx),
|
||||
codegen_operand(fx, &y.node).load_scalar(fx),
|
||||
];
|
||||
&b
|
||||
}
|
||||
[x, y, z] => {
|
||||
c = [
|
||||
codegen_operand(fx, x).load_scalar(fx),
|
||||
codegen_operand(fx, y).load_scalar(fx),
|
||||
codegen_operand(fx, z).load_scalar(fx),
|
||||
codegen_operand(fx, &x.node).load_scalar(fx),
|
||||
codegen_operand(fx, &y.node).load_scalar(fx),
|
||||
codegen_operand(fx, &z.node).load_scalar(fx),
|
||||
];
|
||||
&c
|
||||
}
|
||||
|
@ -422,7 +426,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
|
|||
instance: Instance<'tcx>,
|
||||
intrinsic: Symbol,
|
||||
generic_args: GenericArgsRef<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
ret: CPlace<'tcx>,
|
||||
destination: Option<BasicBlock>,
|
||||
source_info: mir::SourceInfo,
|
||||
|
|
|
@ -21,7 +21,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
fx: &mut FunctionCx<'_, '_, 'tcx>,
|
||||
intrinsic: Symbol,
|
||||
generic_args: GenericArgsRef<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
ret: CPlace<'tcx>,
|
||||
target: BasicBlock,
|
||||
span: Span,
|
||||
|
@ -121,8 +121,8 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
let [x, y] = args else {
|
||||
bug!("wrong number of args for intrinsic {intrinsic}");
|
||||
};
|
||||
let x = codegen_operand(fx, x);
|
||||
let y = codegen_operand(fx, y);
|
||||
let x = codegen_operand(fx, &x.node);
|
||||
let y = codegen_operand(fx, &y.node);
|
||||
|
||||
if !x.layout().ty.is_simd() {
|
||||
report_simd_type_validation_error(fx, intrinsic, span, x.layout().ty);
|
||||
|
@ -172,8 +172,8 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
bug!("wrong number of args for intrinsic {intrinsic}");
|
||||
}
|
||||
};
|
||||
let x = codegen_operand(fx, x);
|
||||
let y = codegen_operand(fx, y);
|
||||
let x = codegen_operand(fx, &x.node);
|
||||
let y = codegen_operand(fx, &y.node);
|
||||
|
||||
if !x.layout().ty.is_simd() {
|
||||
report_simd_type_validation_error(fx, intrinsic, span, x.layout().ty);
|
||||
|
@ -182,7 +182,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
|
||||
// Make sure this is actually an array, since typeck only checks the length-suffixed
|
||||
// version of this intrinsic.
|
||||
let idx_ty = fx.monomorphize(idx.ty(fx.mir, fx.tcx));
|
||||
let idx_ty = fx.monomorphize(idx.node.ty(fx.mir, fx.tcx));
|
||||
let n: u16 = match idx_ty.kind() {
|
||||
ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => len
|
||||
.try_eval_target_usize(fx.tcx, ty::ParamEnv::reveal_all())
|
||||
|
@ -215,7 +215,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
|
||||
let indexes = {
|
||||
use rustc_middle::mir::interpret::*;
|
||||
let idx_const = match idx {
|
||||
let idx_const = match &idx.node {
|
||||
Operand::Constant(const_) => crate::constant::eval_mir_constant(fx, const_).0,
|
||||
Operand::Copy(_) | Operand::Move(_) => unreachable!("{idx:?}"),
|
||||
};
|
||||
|
@ -269,12 +269,12 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
bug!("wrong number of args for intrinsic {intrinsic}");
|
||||
}
|
||||
};
|
||||
let base = codegen_operand(fx, base);
|
||||
let val = codegen_operand(fx, val);
|
||||
let base = codegen_operand(fx, &base.node);
|
||||
let val = codegen_operand(fx, &val.node);
|
||||
|
||||
// FIXME validate
|
||||
let idx_const = if let Some(idx_const) =
|
||||
crate::constant::mir_operand_get_const_val(fx, idx)
|
||||
crate::constant::mir_operand_get_const_val(fx, &idx.node)
|
||||
{
|
||||
idx_const
|
||||
} else {
|
||||
|
@ -304,7 +304,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
bug!("wrong number of args for intrinsic {intrinsic}");
|
||||
}
|
||||
};
|
||||
let v = codegen_operand(fx, v);
|
||||
let v = codegen_operand(fx, &v.node);
|
||||
|
||||
if !v.layout().ty.is_simd() {
|
||||
report_simd_type_validation_error(fx, intrinsic, span, v.layout().ty);
|
||||
|
@ -312,7 +312,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
|
|||
}
|
||||
|
||||
let idx_const = if let Some(idx_const) =
|
||||
crate::constant::mir_operand_get_const_val(fx, idx)
|
||||
crate::constant::mir_operand_get_const_val(fx, &idx.node)
|
||||
{
|
||||
idx_const
|
||||
} else {
|
||||
|
|
|
@ -11,7 +11,7 @@ test = false
|
|||
bitflags = "2.4.1"
|
||||
itertools = "0.11"
|
||||
libc = "0.2"
|
||||
measureme = "10.0.0"
|
||||
measureme = "11"
|
||||
object = { version = "0.32.0", default-features = false, features = ["std", "read"] }
|
||||
rustc-demangle = "0.1.21"
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
|
|
|
@ -145,6 +145,14 @@ pub unsafe fn create_module<'ll>(
|
|||
.replace("-Fi64", "");
|
||||
}
|
||||
}
|
||||
if llvm_version < (18, 0, 0) {
|
||||
if sess.target.arch == "x86" || sess.target.arch == "x86_64" {
|
||||
// LLVM 18 adjusts i128 to be 128-bit aligned on x86 variants.
|
||||
// Earlier LLVMs leave this as default alignment, so remove it.
|
||||
// See https://reviews.llvm.org/D86310
|
||||
target_data_layout = target_data_layout.replace("-i128:128", "");
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure the data-layout values hardcoded remain the defaults.
|
||||
if sess.target.is_builtin {
|
||||
|
|
|
@ -14,7 +14,7 @@ use rustc_data_structures::memmap::Mmap;
|
|||
use rustc_data_structures::profiling::{SelfProfilerRef, VerboseTimingGuard};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_errors::emitter::Emitter;
|
||||
use rustc_errors::{translation::Translate, DiagCtxt, DiagnosticId, FatalError, Level};
|
||||
use rustc_errors::{translation::Translate, DiagCtxt, FatalError, Level};
|
||||
use rustc_errors::{DiagnosticBuilder, DiagnosticMessage, Style};
|
||||
use rustc_fs_util::link_or_copy;
|
||||
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
|
||||
|
@ -1000,7 +1000,7 @@ type DiagnosticArgName<'source> = Cow<'source, str>;
|
|||
struct Diagnostic {
|
||||
msgs: Vec<(DiagnosticMessage, Style)>,
|
||||
args: FxHashMap<DiagnosticArgName<'static>, rustc_errors::DiagnosticArgValue<'static>>,
|
||||
code: Option<DiagnosticId>,
|
||||
code: Option<String>,
|
||||
lvl: Level,
|
||||
}
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, ValidityRequirement};
|
|||
use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
|
||||
use rustc_middle::ty::{self, Instance, Ty};
|
||||
use rustc_session::config::OptLevel;
|
||||
use rustc_span::{sym, Span, Symbol};
|
||||
use rustc_span::{source_map::Spanned, sym, Span, Symbol};
|
||||
use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg};
|
||||
use rustc_target::abi::{self, HasDataLayout, WrappingRange};
|
||||
use rustc_target::spec::abi::Abi;
|
||||
|
@ -742,7 +742,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
bx: &mut Bx,
|
||||
terminator: &mir::Terminator<'tcx>,
|
||||
func: &mir::Operand<'tcx>,
|
||||
args: &[mir::Operand<'tcx>],
|
||||
args: &[Spanned<mir::Operand<'tcx>>],
|
||||
destination: mir::Place<'tcx>,
|
||||
target: Option<mir::BasicBlock>,
|
||||
unwind: mir::UnwindAction,
|
||||
|
@ -793,7 +793,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
|
||||
let extra_args = &args[sig.inputs().skip_binder().len()..];
|
||||
let extra_args = bx.tcx().mk_type_list_from_iter(extra_args.iter().map(|op_arg| {
|
||||
let op_ty = op_arg.ty(self.mir, bx.tcx());
|
||||
let op_ty = op_arg.node.ty(self.mir, bx.tcx());
|
||||
self.monomorphize(op_ty)
|
||||
}));
|
||||
|
||||
|
@ -863,7 +863,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
// checked by const-qualification, which also
|
||||
// promotes any complex rvalues to constants.
|
||||
if i == 2 && intrinsic == sym::simd_shuffle {
|
||||
if let mir::Operand::Constant(constant) = arg {
|
||||
if let mir::Operand::Constant(constant) = &arg.node {
|
||||
let (llval, ty) = self.simd_shuffle_indices(bx, constant);
|
||||
return OperandRef {
|
||||
val: Immediate(llval),
|
||||
|
@ -874,7 +874,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
}
|
||||
}
|
||||
|
||||
self.codegen_operand(bx, arg)
|
||||
self.codegen_operand(bx, &arg.node)
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
@ -910,7 +910,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
|
||||
let mut copied_constant_arguments = vec![];
|
||||
'make_args: for (i, arg) in first_args.iter().enumerate() {
|
||||
let mut op = self.codegen_operand(bx, arg);
|
||||
let mut op = self.codegen_operand(bx, &arg.node);
|
||||
|
||||
if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
|
||||
match op.val {
|
||||
|
@ -988,7 +988,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
|
||||
// The callee needs to own the argument memory if we pass it
|
||||
// by-ref, so make a local copy of non-immediate constants.
|
||||
match (arg, op.val) {
|
||||
match (&arg.node, op.val) {
|
||||
(&mir::Operand::Copy(_), Ref(_, None, _))
|
||||
| (&mir::Operand::Constant(_), Ref(_, None, _)) => {
|
||||
let tmp = PlaceRef::alloca(bx, op.layout);
|
||||
|
@ -1003,7 +1003,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
self.codegen_argument(bx, op, &mut llargs, &fn_abi.args[i]);
|
||||
}
|
||||
let num_untupled = untuple.map(|tup| {
|
||||
self.codegen_arguments_untupled(bx, tup, &mut llargs, &fn_abi.args[first_args.len()..])
|
||||
self.codegen_arguments_untupled(
|
||||
bx,
|
||||
&tup.node,
|
||||
&mut llargs,
|
||||
&fn_abi.args[first_args.len()..],
|
||||
)
|
||||
});
|
||||
|
||||
let needs_location =
|
||||
|
|
|
@ -225,17 +225,10 @@ pub fn eval_to_const_value_raw_provider<'tcx>(
|
|||
tcx: TyCtxt<'tcx>,
|
||||
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
|
||||
) -> ::rustc_middle::mir::interpret::EvalToConstValueResult<'tcx> {
|
||||
// see comment in eval_to_allocation_raw_provider for what we're doing here
|
||||
if key.param_env.reveal() == Reveal::All {
|
||||
let mut key = key;
|
||||
key.param_env = key.param_env.with_user_facing();
|
||||
match tcx.eval_to_const_value_raw(key) {
|
||||
// try again with reveal all as requested
|
||||
Err(ErrorHandled::TooGeneric(_)) => {}
|
||||
// deduplicate calls
|
||||
other => return other,
|
||||
}
|
||||
}
|
||||
// Const eval always happens in Reveal::All mode in order to be able to use the hidden types of
|
||||
// opaque types. This is needed for trivial things like `size_of`, but also for using associated
|
||||
// types that are not specified in the opaque type.
|
||||
assert_eq!(key.param_env.reveal(), Reveal::All);
|
||||
|
||||
// We call `const_eval` for zero arg intrinsics, too, in order to cache their value.
|
||||
// Catch such calls and evaluate them instead of trying to load a constant's MIR.
|
||||
|
@ -265,24 +258,11 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
|
|||
tcx: TyCtxt<'tcx>,
|
||||
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
|
||||
) -> ::rustc_middle::mir::interpret::EvalToAllocationRawResult<'tcx> {
|
||||
// Because the constant is computed twice (once per value of `Reveal`), we are at risk of
|
||||
// reporting the same error twice here. To resolve this, we check whether we can evaluate the
|
||||
// constant in the more restrictive `Reveal::UserFacing`, which most likely already was
|
||||
// computed. For a large percentage of constants that will already have succeeded. Only
|
||||
// associated constants of generic functions will fail due to not enough monomorphization
|
||||
// information being available.
|
||||
// Const eval always happens in Reveal::All mode in order to be able to use the hidden types of
|
||||
// opaque types. This is needed for trivial things like `size_of`, but also for using associated
|
||||
// types that are not specified in the opaque type.
|
||||
|
||||
// In case we fail in the `UserFacing` variant, we just do the real computation.
|
||||
if key.param_env.reveal() == Reveal::All {
|
||||
let mut key = key;
|
||||
key.param_env = key.param_env.with_user_facing();
|
||||
match tcx.eval_to_allocation_raw(key) {
|
||||
// try again with reveal all as requested
|
||||
Err(ErrorHandled::TooGeneric(_)) => {}
|
||||
// deduplicate calls
|
||||
other => return other,
|
||||
}
|
||||
}
|
||||
assert_eq!(key.param_env.reveal(), Reveal::All);
|
||||
if cfg!(debug_assertions) {
|
||||
// Make sure we format the instance even if we do not print it.
|
||||
// This serves as a regression test against an ICE on printing.
|
||||
|
|
|
@@ -1209,21 +1209,28 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
}
}
}

for i in 0..num_copies {
ptr::copy(
src_bytes,
dest_bytes.add((size * i).bytes_usize()), // `Size` multiplication
size.bytes_usize(),
);
let size_in_bytes = size.bytes_usize();
// For particularly large arrays (where this is perf-sensitive) it's common that
// we're writing a single byte repeatedly. So, optimize that case to a memset.
if size_in_bytes == 1 && num_copies >= 1 {
// SAFETY: `src_bytes` would be read from anyway by copies below (num_copies >= 1).
// Since size_in_bytes = 1, then the `init.no_bytes_init()` check above guarantees
// that this read at type `u8` is OK -- it must be an initialized byte.
let value = *src_bytes;
dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
} else if src_alloc_id == dest_alloc_id {
let mut dest_ptr = dest_bytes;
for _ in 0..num_copies {
ptr::copy(src_bytes, dest_ptr, size_in_bytes);
dest_ptr = dest_ptr.add(size_in_bytes);
}
} else {
for i in 0..num_copies {
ptr::copy_nonoverlapping(
src_bytes,
dest_bytes.add((size * i).bytes_usize()), // `Size` multiplication
size.bytes_usize(),
);
let mut dest_ptr = dest_bytes;
for _ in 0..num_copies {
ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
dest_ptr = dest_ptr.add(size_in_bytes);
}
}
}
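The new interpreter code above splits the repeat-copy into three cases: a one-byte source becomes a single `write_bytes` (a memset), copies within the same allocation keep using `ptr::copy`, and cross-allocation copies use `ptr::copy_nonoverlapping`. A safe sketch of the same dispatch over byte slices (provenance and the same-allocation arm of the real code are omitted, since disjoint slices can never overlap):

```rust
/// Copy `src` into `dest` `num_copies` times, back to back.
fn repeat_copy(src: &[u8], dest: &mut [u8], num_copies: usize) {
    let size = src.len();
    assert_eq!(dest.len(), size * num_copies);
    if size == 0 || num_copies == 0 {
        return;
    }

    if size == 1 {
        // memset-style fast path for the common "repeat a single byte" case.
        dest.fill(src[0]);
    } else {
        // Disjoint slices cannot overlap, so this stands in for both the
        // `ptr::copy` and `ptr::copy_nonoverlapping` arms of the real code.
        for chunk in dest.chunks_exact_mut(size) {
            chunk.copy_from_slice(src);
        }
    }
}

fn main() {
    let mut filled = vec![0u8; 8];
    repeat_copy(&[0xAB], &mut filled, 8);
    assert_eq!(filled, vec![0xAB; 8]);

    let mut tiled = vec![0u8; 6];
    repeat_copy(&[1, 2, 3], &mut tiled, 2);
    assert_eq!(tiled, vec![1, 2, 3, 1, 2, 3]);
}
```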
@ -9,7 +9,7 @@ use rustc_middle::{
|
|||
AdtDef, Instance, Ty,
|
||||
},
|
||||
};
|
||||
use rustc_span::sym;
|
||||
use rustc_span::{source_map::Spanned, sym};
|
||||
use rustc_target::abi::{self, FieldIdx};
|
||||
use rustc_target::abi::{
|
||||
call::{ArgAbi, FnAbi, PassMode},
|
||||
|
@ -242,13 +242,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
|||
/// Evaluate the arguments of a function call
|
||||
pub(super) fn eval_fn_call_arguments(
|
||||
&self,
|
||||
ops: &[mir::Operand<'tcx>],
|
||||
ops: &[Spanned<mir::Operand<'tcx>>],
|
||||
) -> InterpResult<'tcx, Vec<FnArg<'tcx, M::Provenance>>> {
|
||||
ops.iter()
|
||||
.map(|op| {
|
||||
Ok(match op {
|
||||
Ok(match &op.node {
|
||||
mir::Operand::Move(place) => FnArg::InPlace(self.eval_place(*place)?),
|
||||
_ => FnArg::Copy(self.eval_operand(op, None)?),
|
||||
_ => FnArg::Copy(self.eval_operand(&op.node, None)?),
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
|
|
|
@ -804,7 +804,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
|
|||
|
||||
// const-eval of the `begin_panic` fn assumes the argument is `&str`
|
||||
if Some(callee) == tcx.lang_items().begin_panic_fn() {
|
||||
match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
|
||||
match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
|
||||
ty::Ref(_, ty, _) if ty.is_str() => return,
|
||||
_ => self.check_op(ops::PanicNonStr),
|
||||
}
|
||||
|
@ -812,7 +812,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
|
|||
|
||||
// const-eval of `#[rustc_const_panic_str]` functions assumes the argument is `&&str`
|
||||
if tcx.has_attr(callee, sym::rustc_const_panic_str) {
|
||||
match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
|
||||
match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
|
||||
ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
|
||||
{
|
||||
return;
|
||||
|
|
|
@ -402,7 +402,7 @@ impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
|
|||
);
|
||||
}
|
||||
for arg in args {
|
||||
if let Operand::Move(place) = arg {
|
||||
if let Operand::Move(place) = &arg.node {
|
||||
if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
|
||||
// This is bad! The callee will expect the memory to be aligned.
|
||||
self.fail(
|
||||
|
|
|
@@ -13,7 +13,7 @@ indexmap = { version = "2.0.0" }
itertools = "0.11"
jobserver_crate = { version = "0.1.27", package = "jobserver" }
libc = "0.2"
measureme = "10.0.0"
measureme = "11"
rustc-hash = "1.1.0"
rustc-rayon = { version = "0.5.0", optional = true }
rustc-rayon-core = { version = "0.5.0", optional = true }

@@ -18,27 +18,10 @@ where
return &[];
};

// Now search forward to find the *last* one.
let mut end = start;
let mut previous = start;
let mut step = 1;
loop {
end = end.saturating_add(step).min(size);
if end == size || key_fn(&data[end]) != *key {
break;
}
previous = end;
step *= 2;
}
step = end - previous;
while step > 1 {
let half = step / 2;
let mid = end - half;
if key_fn(&data[mid]) != *key {
end = mid;
}
step -= half;
}
// Find the first entry with key > `key`. Skip `start` entries since
// key_fn(&data[start]) == *key
let offset = start + 1;
let end = data[offset..].partition_point(|x| key_fn(x) <= *key) + offset;

&data[start..end]
}
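The replacement body above drops the hand-rolled galloping search and finds the end of the run of equal keys with `slice::partition_point`, which returns the index of the first element for which the predicate flips to false in an already-partitioned slice. A standalone version of the same "return every entry with this key" lookup:

```rust
/// Return the sub-slice of `data` (sorted by `key_fn`) whose key equals `key`.
fn find_run<'a, T, K: Ord>(data: &'a [T], key_fn: impl Fn(&T) -> K, key: &K) -> &'a [T] {
    // First element with this key.
    let start = data.partition_point(|x| key_fn(x) < *key);
    if start == data.len() || key_fn(&data[start]) != *key {
        return &[];
    }
    // First element past the run; skip `start + 1` entries since
    // `key_fn(&data[start]) == *key` already holds.
    let offset = start + 1;
    let end = data[offset..].partition_point(|x| key_fn(x) <= *key) + offset;
    &data[start..end]
}

fn main() {
    let pairs = [(1, 'a'), (2, 'b'), (2, 'c'), (2, 'd'), (5, 'e')];
    assert_eq!(find_run(&pairs, |p| p.0, &2), &[(2, 'b'), (2, 'c'), (2, 'd')]);
    assert!(find_run(&pairs, |p| p.0, &4).is_empty());
}
```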
@@ -75,11 +75,21 @@ impl fmt::LowerHex for Hash64 {
}
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub struct Hash128 {
inner: u128,
}

// We expect Hash128 to be well mixed. So there's no point in hashing both parts.
//
// This also allows using Hash128-containing types in UnHash-based hashmaps, which would otherwise
// debug_assert! that we're hashing more than a single u64.
impl std::hash::Hash for Hash128 {
fn hash<H: std::hash::Hasher>(&self, h: &mut H) {
h.write_u64(self.truncate().as_u64());
}
}

impl Hash128 {
#[inline]
pub fn truncate(self) -> Hash64 {

@@ -177,7 +177,6 @@ cfg_match! {
[Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
[Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
[crate::sync::RwLock<T> where T: DynSend + DynSync]
[crate::sync::OneThread<T> where T]
[crate::sync::WorkerLocal<T> where T: DynSend]
[crate::intern::Interned<'a, T> where 'a, T: DynSync]
[crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Sync + crate::tagged_ptr::Pointer, T: Sync + crate::tagged_ptr::Tag, const CP: bool]

@@ -314,7 +314,19 @@ impl_stable_traits_for_trivial_type!(char);
impl_stable_traits_for_trivial_type!(());

impl_stable_traits_for_trivial_type!(Hash64);
impl_stable_traits_for_trivial_type!(Hash128);

// We need a custom impl as the default hash function will only hash half the bits. For stable
// hashing we want to hash the full 128-bit hash.
impl<CTX> HashStable<CTX> for Hash128 {
#[inline]
fn hash_stable(&self, _: &mut CTX, hasher: &mut StableHasher) {
self.as_u128().hash(hasher);
}
}

unsafe impl StableOrd for Hash128 {
const CAN_USE_UNSTABLE_SORT: bool = true;
}

impl<CTX> HashStable<CTX> for ! {
fn hash_stable(&self, _ctx: &mut CTX, _hasher: &mut StableHasher) {
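The `Hash` impl added above deliberately feeds ordinary hashers only the truncated 64-bit half, on the assumption that `Hash128` is already well mixed, while the new `HashStable` impl still hashes all 128 bits. A sketch of the hash-map side of that split on a plain wrapper type (stable hashing is compiler-internal and not reproduced here):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Wrapper over an already well-mixed 128-bit value.
#[derive(Clone, Copy, PartialEq, Eq)]
struct Mixed128(u128);

impl Hash for Mixed128 {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // The value is assumed uniformly distributed, so the low 64 bits are
        // enough for hash-map use; feeding both halves buys nothing.
        h.write_u64(self.0 as u64);
    }
}

fn main() {
    let a = Mixed128(0x1234_5678_9abc_def0_0fed_cba9_8765_4321);
    let mut hasher = DefaultHasher::new();
    a.hash(&mut hasher);
    println!("hash-map hash (low 64 bits only): {:x}", hasher.finish());
}
```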
@ -43,7 +43,6 @@
|
|||
pub use crate::marker::*;
|
||||
use std::collections::HashMap;
|
||||
use std::hash::{BuildHasher, Hash};
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
mod lock;
|
||||
pub use lock::{Lock, LockGuard, Mode};
|
||||
|
@ -309,8 +308,6 @@ cfg_match! {
|
|||
|
||||
use parking_lot::RwLock as InnerRwLock;
|
||||
|
||||
use std::thread;
|
||||
|
||||
/// This makes locks panic if they are already held.
|
||||
/// It is only useful when you are running in a single thread
|
||||
const ERROR_CHECKING: bool = false;
|
||||
|
@ -445,56 +442,3 @@ impl<T: Clone> Clone for RwLock<T> {
|
|||
RwLock::new(self.borrow().clone())
|
||||
}
|
||||
}
|
||||
|
||||
/// A type which only allows its inner value to be used in one thread.
|
||||
/// It will panic if it is used on multiple threads.
|
||||
#[derive(Debug)]
|
||||
pub struct OneThread<T> {
|
||||
#[cfg(parallel_compiler)]
|
||||
thread: thread::ThreadId,
|
||||
inner: T,
|
||||
}
|
||||
|
||||
#[cfg(parallel_compiler)]
|
||||
unsafe impl<T> std::marker::Sync for OneThread<T> {}
|
||||
#[cfg(parallel_compiler)]
|
||||
unsafe impl<T> std::marker::Send for OneThread<T> {}
|
||||
|
||||
impl<T> OneThread<T> {
|
||||
#[inline(always)]
|
||||
fn check(&self) {
|
||||
#[cfg(parallel_compiler)]
|
||||
assert_eq!(thread::current().id(), self.thread);
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn new(inner: T) -> Self {
|
||||
OneThread {
|
||||
#[cfg(parallel_compiler)]
|
||||
thread: thread::current().id(),
|
||||
inner,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn into_inner(value: Self) -> T {
|
||||
value.check();
|
||||
value.inner
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for OneThread<T> {
|
||||
type Target = T;
|
||||
|
||||
fn deref(&self) -> &T {
|
||||
self.check();
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> DerefMut for OneThread<T> {
|
||||
fn deref_mut(&mut self) -> &mut T {
|
||||
self.check();
|
||||
&mut self.inner
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -35,7 +35,7 @@ use rustc_lint::unerased_lint_store;
use rustc_metadata::creader::MetadataLoader;
use rustc_metadata::locator;
use rustc_session::config::{nightly_options, CG_OPTIONS, Z_OPTIONS};
use rustc_session::config::{ErrorOutputType, Input, OutFileName, OutputType, TrimmedDefPaths};
use rustc_session::config::{ErrorOutputType, Input, OutFileName, OutputType};
use rustc_session::getopts::{self, Matches};
use rustc_session::lint::{Lint, LintId};
use rustc_session::{config, EarlyDiagCtxt, Session};

@@ -204,7 +204,7 @@ impl Callbacks for TimePassesCallbacks {
//
self.time_passes = (config.opts.prints.is_empty() && config.opts.unstable_opts.time_passes)
.then(|| config.opts.unstable_opts.time_passes_format);
config.opts.trimmed_def_paths = TrimmedDefPaths::GoodPath;
config.opts.trimmed_def_paths = true;
}
}
@@ -1,12 +1,14 @@
#### Note: this error code is no longer emitted by the compiler.

`async` non-`move` closures with parameters are currently not supported.

Erroneous code example:

```compile_fail,edition2018,E0708
```edition2018
#![feature(async_closure)]

fn main() {
let add_one = async |num: u8| { // error!
let add_one = async |num: u8| {
num + 1
};
}

@@ -3,7 +3,7 @@ Invalid argument for the `offset_of!` macro.
Erroneous code example:

```compile_fail,E0795
#![feature(offset_of, offset_of_enum)]
#![feature(offset_of_enum, offset_of_nested)]

let x = std::mem::offset_of!(Option<u8>, Some);
```

@@ -16,7 +16,7 @@ The offset of the contained `u8` in the `Option<u8>` can be found by specifying
the field name `0`:

```
#![feature(offset_of, offset_of_enum)]
#![feature(offset_of_enum, offset_of_nested)]

let x: usize = std::mem::offset_of!(Option<u8>, Some.0);
```
@ -9,8 +9,8 @@ use crate::emitter::FileWithAnnotatedLines;
|
|||
use crate::snippet::Line;
|
||||
use crate::translation::{to_fluent_args, Translate};
|
||||
use crate::{
|
||||
CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage, Emitter, FluentBundle,
|
||||
LazyFallbackBundle, Level, MultiSpan, Style, SubDiagnostic,
|
||||
CodeSuggestion, Diagnostic, DiagnosticMessage, Emitter, FluentBundle, LazyFallbackBundle,
|
||||
Level, MultiSpan, Style, SubDiagnostic,
|
||||
};
|
||||
use annotate_snippets::{Annotation, AnnotationType, Renderer, Slice, Snippet, SourceAnnotation};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
|
@ -127,7 +127,7 @@ impl AnnotateSnippetEmitter {
|
|||
level: &Level,
|
||||
messages: &[(DiagnosticMessage, Style)],
|
||||
args: &FluentArgs<'_>,
|
||||
code: &Option<DiagnosticId>,
|
||||
code: &Option<String>,
|
||||
msp: &MultiSpan,
|
||||
_children: &[SubDiagnostic],
|
||||
_suggestions: &[CodeSuggestion],
|
||||
|
@ -181,11 +181,7 @@ impl AnnotateSnippetEmitter {
|
|||
let snippet = Snippet {
|
||||
title: Some(Annotation {
|
||||
label: Some(&message),
|
||||
id: code.as_ref().map(|c| match c {
|
||||
DiagnosticId::Error(val) | DiagnosticId::Lint { name: val, .. } => {
|
||||
val.as_str()
|
||||
}
|
||||
}),
|
||||
id: code.as_deref(),
|
||||
annotation_type: annotation_type_for_level(*level),
|
||||
}),
|
||||
footer: vec![],
|
||||
|
|
|
@@ -104,7 +104,7 @@ pub struct Diagnostic {
pub(crate) level: Level,

pub messages: Vec<(DiagnosticMessage, Style)>,
pub code: Option<DiagnosticId>,
pub code: Option<String>,
pub span: MultiSpan,
pub children: Vec<SubDiagnostic>,
pub suggestions: Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
@@ -115,9 +115,9 @@ pub struct Diagnostic {
/// `span` if there is one. Otherwise, it is `DUMMY_SP`.
pub sort_span: Span,

/// If diagnostic is from Lint, custom hash function ignores notes
/// otherwise hash is based on the all the fields
pub is_lint: bool,
/// If diagnostic is from Lint, custom hash function ignores children.
/// Otherwise hash is based on the all the fields.
pub is_lint: Option<IsLint>,

/// With `-Ztrack_diagnostics` enabled,
/// we print where in rustc this error was emitted.
@@ -146,13 +146,11 @@ impl fmt::Display for DiagnosticLocation {
}

#[derive(Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable)]
pub enum DiagnosticId {
Error(String),
Lint {
name: String,
/// Indicates whether this lint should show up in cargo's future breakage report.
has_future_breakage: bool,
},
pub struct IsLint {
/// The lint name.
pub(crate) name: String,
/// Indicates whether this lint should show up in cargo's future breakage report.
has_future_breakage: bool,
}

/// A "sub"-diagnostic attached to a parent diagnostic.
@@ -231,7 +229,7 @@ impl Diagnostic {
suggestions: Ok(vec![]),
args: Default::default(),
sort_span: DUMMY_SP,
is_lint: false,
is_lint: None,
emitted_at: DiagnosticLocation::caller(),
}
}
@@ -288,16 +286,13 @@ impl Diagnostic {

/// Indicates whether this diagnostic should show up in cargo's future breakage report.
pub(crate) fn has_future_breakage(&self) -> bool {
match self.code {
Some(DiagnosticId::Lint { has_future_breakage, .. }) => has_future_breakage,
_ => false,
}
matches!(self.is_lint, Some(IsLint { has_future_breakage: true, .. }))
}

pub(crate) fn is_force_warn(&self) -> bool {
match self.level {
Level::ForceWarning(_) => {
assert!(self.is_lint);
assert!(self.is_lint.is_some());
true
}
_ => false,
@@ -893,12 +888,12 @@ impl Diagnostic {
self
}

pub fn is_lint(&mut self) -> &mut Self {
self.is_lint = true;
pub fn is_lint(&mut self, name: String, has_future_breakage: bool) -> &mut Self {
self.is_lint = Some(IsLint { name, has_future_breakage });
self
}

pub fn code(&mut self, s: DiagnosticId) -> &mut Self {
pub fn code(&mut self, s: String) -> &mut Self {
self.code = Some(s);
self
}
@@ -908,8 +903,8 @@ impl Diagnostic {
self
}

pub fn get_code(&self) -> Option<DiagnosticId> {
self.code.clone()
pub fn get_code(&self) -> Option<&str> {
self.code.as_deref()
}

pub fn primary_message(&mut self, msg: impl Into<DiagnosticMessage>) -> &mut Self {
@@ -995,7 +990,8 @@ impl Diagnostic {
&Level,
&[(DiagnosticMessage, Style)],
Vec<(&Cow<'static, str>, &DiagnosticArgValue<'static>)>,
&Option<DiagnosticId>,
&Option<String>,
&Option<IsLint>,
&MultiSpan,
&Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
Option<&[SubDiagnostic]>,
@@ -1005,9 +1001,10 @@ impl Diagnostic {
&self.messages,
self.args().collect(),
&self.code,
&self.is_lint,
&self.span,
&self.suggestions,
(if self.is_lint { None } else { Some(&self.children) }),
(if self.is_lint.is_some() { None } else { Some(&self.children) }),
)
}
}
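A minimal sketch (not part of this diff) of the new shape the hunks above introduce, using stand-in types rather than rustc's own: the error code becomes a plain Option<String> and the lint metadata moves into an Option<IsLint>, queried with matches!.

    #[allow(dead_code)]
    struct IsLint { name: String, has_future_breakage: bool }

    struct Diag { code: Option<String>, is_lint: Option<IsLint> }

    impl Diag {
        // Mirrors the new `has_future_breakage` helper: the flag now lives on `is_lint`.
        fn has_future_breakage(&self) -> bool {
            matches!(self.is_lint, Some(IsLint { has_future_breakage: true, .. }))
        }
    }

    fn main() {
        let d = Diag {
            code: Some("E0308".to_string()),
            is_lint: Some(IsLint { name: "unused_variables".to_string(), has_future_breakage: false }),
        };
        assert_eq!(d.code.as_deref(), Some("E0308"));
        assert!(!d.has_future_breakage());
    }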
@@ -1,8 +1,8 @@
use crate::diagnostic::IntoDiagnosticArg;
use crate::{DiagCtxt, Level, MultiSpan, StashKey};
use crate::{
Diagnostic, DiagnosticId, DiagnosticMessage, DiagnosticStyledString, ErrorGuaranteed,
ExplicitBug, SubdiagnosticMessage,
Diagnostic, DiagnosticMessage, DiagnosticStyledString, ErrorGuaranteed, ExplicitBug,
SubdiagnosticMessage,
};
use rustc_lint_defs::Applicability;
use rustc_span::source_map::Spanned;
@@ -395,8 +395,11 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
forward!((span, with_span)(
sp: impl Into<MultiSpan>,
));
forward!((is_lint, with_is_lint)(
name: String, has_future_breakage: bool,
));
forward!((code, with_code)(
s: DiagnosticId,
s: String,
));
forward!((arg, with_arg)(
name: impl Into<Cow<'static, str>>, arg: impl IntoDiagnosticArg,
@@ -437,15 +440,11 @@ impl<G: EmissionGuarantee> Drop for DiagnosticBuilder<'_, G> {
#[macro_export]
macro_rules! struct_span_code_err {
($dcx:expr, $span:expr, $code:ident, $($message:tt)*) => ({
$dcx.struct_span_err(
$span,
format!($($message)*),
)
.with_code($crate::error_code!($code))
$dcx.struct_span_err($span, format!($($message)*)).with_code($crate::error_code!($code))
})
}

#[macro_export]
macro_rules! error_code {
($code:ident) => {{ $crate::DiagnosticId::Error(stringify!($code).to_owned()) }};
($code:ident) => {{ stringify!($code).to_owned() }};
}
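To make the error_code! change concrete, a standalone sketch: the macro body is copied from the hunk above, while the surrounding main is only illustrative.

    macro_rules! error_code {
        ($code:ident) => {{ stringify!($code).to_owned() }};
    }

    fn main() {
        // Previously this expanded to `DiagnosticId::Error("E0592".to_owned())`;
        // now it is just the plain string code.
        let code: String = error_code!(E0592);
        assert_eq!(code, "E0592");
    }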
@@ -16,9 +16,9 @@ use crate::snippet::{
use crate::styled_buffer::StyledBuffer;
use crate::translation::{to_fluent_args, Translate};
use crate::{
diagnostic::DiagnosticLocation, CodeSuggestion, DiagCtxt, Diagnostic, DiagnosticId,
DiagnosticMessage, FluentBundle, LazyFallbackBundle, Level, MultiSpan, SubDiagnostic,
SubstitutionHighlight, SuggestionStyle, TerminalUrl,
diagnostic::DiagnosticLocation, CodeSuggestion, DiagCtxt, Diagnostic, DiagnosticMessage,
FluentBundle, LazyFallbackBundle, Level, MultiSpan, SubDiagnostic, SubstitutionHighlight,
SuggestionStyle, TerminalUrl,
};
use rustc_lint_defs::pluralize;

@@ -1309,7 +1309,7 @@ impl HumanEmitter {
msp: &MultiSpan,
msgs: &[(DiagnosticMessage, Style)],
args: &FluentArgs<'_>,
code: &Option<DiagnosticId>,
code: &Option<String>,
level: &Level,
max_line_num_len: usize,
is_secondary: bool,
@@ -1336,14 +1336,13 @@ impl HumanEmitter {
buffer.append(0, level.to_str(), Style::Level(*level));
label_width += level.to_str().len();
}
// only render error codes, not lint codes
if let Some(DiagnosticId::Error(ref code)) = *code {
if let Some(code) = code {
buffer.append(0, "[", Style::Level(*level));
let code = if let TerminalUrl::Yes = self.terminal_url {
let path = "https://doc.rust-lang.org/error_codes";
format!("\x1b]8;;{path}/{code}.html\x07{code}\x1b]8;;\x07")
Cow::Owned(format!("\x1b]8;;{path}/{code}.html\x07{code}\x1b]8;;\x07"))
} else {
code.clone()
Cow::Borrowed(code)
};
buffer.append(0, &code, Style::Level(*level));
buffer.append(0, "]", Style::Level(*level));
@@ -2077,7 +2076,7 @@ impl HumanEmitter {
level: &Level,
messages: &[(DiagnosticMessage, Style)],
args: &FluentArgs<'_>,
code: &Option<DiagnosticId>,
code: &Option<String>,
span: &MultiSpan,
children: &[SubDiagnostic],
suggestions: &[CodeSuggestion],
@@ -15,10 +15,9 @@ use termcolor::{ColorSpec, WriteColor};
use crate::emitter::{should_show_source_code, Emitter, HumanReadableErrorType};
use crate::registry::Registry;
use crate::translation::{to_fluent_args, Translate};
use crate::DiagnosticId;
use crate::{
CodeSuggestion, FluentBundle, LazyFallbackBundle, MultiSpan, SpanLabel, SubDiagnostic,
TerminalUrl,
diagnostic::IsLint, CodeSuggestion, FluentBundle, LazyFallbackBundle, MultiSpan, SpanLabel,
SubDiagnostic, TerminalUrl,
};
use rustc_lint_defs::Applicability;

@@ -301,7 +300,8 @@ struct DiagnosticSpanMacroExpansion {

#[derive(Serialize)]
struct DiagnosticCode {
/// The code itself.
/// The error code (e.g. "E1234"), if the diagnostic has one. Or the lint
/// name, if it's a lint without an error code.
code: String,
/// An explanation for the code.
explanation: Option<&'static str>,
@@ -399,9 +399,21 @@ impl Diagnostic {
let output = String::from_utf8(output).unwrap();

let translated_message = je.translate_messages(&diag.messages, &args);

let code = if let Some(code) = &diag.code {
Some(DiagnosticCode {
code: code.to_string(),
explanation: je.registry.as_ref().unwrap().try_find_description(&code).ok(),
})
} else if let Some(IsLint { name, .. }) = &diag.is_lint {
Some(DiagnosticCode { code: name.to_string(), explanation: None })
} else {
None
};

Diagnostic {
message: translated_message.to_string(),
code: DiagnosticCode::map_opt_string(diag.code.clone(), je),
code,
level: diag.level.to_str(),
spans: DiagnosticSpan::from_multispan(&diag.span, &args, je),
children: diag
@@ -592,18 +604,3 @@ impl DiagnosticSpanLine {
.unwrap_or_else(|_| vec![])
}
}

impl DiagnosticCode {
fn map_opt_string(s: Option<DiagnosticId>, je: &JsonEmitter) -> Option<DiagnosticCode> {
s.map(|s| {
let s = match s {
DiagnosticId::Error(s) => s,
DiagnosticId::Lint { name, .. } => name,
};
let je_result =
je.registry.as_ref().map(|registry| registry.try_find_description(&s)).unwrap();

DiagnosticCode { code: s, explanation: je_result.ok() }
})
}
}
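The new JSON code selection above prefers an explicit error code and falls back to the lint name. A simplified, self-contained sketch of that choice (stand-in types, explanation lookup omitted):

    struct IsLint { name: String }

    fn json_code(code: Option<&str>, is_lint: Option<&IsLint>) -> Option<String> {
        // Mirrors the hunk above: explicit code first, then the lint name, else no code at all.
        code.map(str::to_owned).or_else(|| is_lint.map(|l| l.name.clone()))
    }

    fn main() {
        assert_eq!(json_code(Some("E0592"), None).as_deref(), Some("E0592"));
        let lint = IsLint { name: "unused_variables".to_string() };
        assert_eq!(json_code(None, Some(&lint)).as_deref(), Some("unused_variables"));
    }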
@@ -30,7 +30,7 @@ extern crate tracing;
extern crate self as rustc_errors;

pub use diagnostic::{
AddToDiagnostic, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgValue, DiagnosticId,
AddToDiagnostic, DecorateLint, Diagnostic, DiagnosticArg, DiagnosticArgValue,
DiagnosticStyledString, IntoDiagnosticArg, SubDiagnostic,
};
pub use diagnostic_builder::{
@@ -442,13 +442,13 @@ struct DiagCtxtInner {
/// This is used for the `good_path_delayed_bugs` check.
suppressed_expected_diag: bool,

/// This set contains the `DiagnosticId` of all emitted diagnostics to avoid
/// This set contains the code of all emitted diagnostics to avoid
/// emitting the same diagnostic with extended help (`--teach`) twice, which
/// would be unnecessary repetition.
taught_diagnostics: FxHashSet<DiagnosticId>,
taught_diagnostics: FxHashSet<String>,

/// Used to suggest rustc --explain `<error code>`
emitted_diagnostic_codes: FxIndexSet<DiagnosticId>,
emitted_diagnostic_codes: FxIndexSet<String>,

/// This set contains a hash of every diagnostic that has been emitted by
/// this `DiagCtxt`. These hashes is used to avoid emitting the same error
@@ -676,7 +676,7 @@ impl DiagCtxt {
let key = (span.with_parent(None), key);

if diag.is_error() {
if diag.is_lint {
if diag.is_lint.is_some() {
inner.lint_err_count += 1;
} else {
inner.err_count += 1;
@@ -695,7 +695,7 @@ impl DiagCtxt {
let key = (span.with_parent(None), key);
let diag = inner.stashed_diagnostics.remove(&key)?;
if diag.is_error() {
if diag.is_lint {
if diag.is_lint.is_some() {
inner.lint_err_count -= 1;
} else {
inner.err_count -= 1;
@@ -715,9 +715,7 @@ impl DiagCtxt {

/// Construct a builder at the `Warning` level at the given `span` and with the `msg`.
///
/// Attempting to `.emit()` the builder will only emit if either:
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
/// An `emit` call on the builder will only emit if `can_emit_warnings` is `true`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_warn(
@@ -730,9 +728,7 @@ impl DiagCtxt {

/// Construct a builder at the `Warning` level with the `msg`.
///
/// Attempting to `.emit()` the builder will only emit if either:
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
/// An `emit` call on the builder will only emit if `can_emit_warnings` is `true`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_warn(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
@@ -1011,11 +1007,12 @@ impl DiagCtxt {
let mut error_codes = inner
.emitted_diagnostic_codes
.iter()
.filter_map(|x| match &x {
DiagnosticId::Error(s) if registry.try_find_description(s).is_ok() => {
Some(s.clone())
.filter_map(|code| {
if registry.try_find_description(code).is_ok().clone() {
Some(code.clone())
} else {
None
}
_ => None,
})
.collect::<Vec<_>>();
if !error_codes.is_empty() {
@@ -1058,8 +1055,8 @@ impl DiagCtxt {
///
/// Used to suppress emitting the same error multiple times with extended explanation when
/// calling `-Zteach`.
pub fn must_teach(&self, code: &DiagnosticId) -> bool {
self.inner.borrow_mut().taught_diagnostics.insert(code.clone())
pub fn must_teach(&self, code: &str) -> bool {
self.inner.borrow_mut().taught_diagnostics.insert(code.to_string())
}

pub fn force_print_diagnostic(&self, db: Diagnostic) {
@@ -1231,7 +1228,7 @@ impl DiagCtxtInner {
for diag in diags {
// Decrement the count tracking the stash; emitting will increment it.
if diag.is_error() {
if diag.is_lint {
if diag.is_lint.is_some() {
self.lint_err_count -= 1;
} else {
self.err_count -= 1;
@@ -1363,7 +1360,7 @@ impl DiagCtxtInner {
self.has_printed = true;
}
if diagnostic.is_error() {
if diagnostic.is_lint {
if diagnostic.is_lint.is_some() {
self.lint_err_count += 1;
} else {
self.err_count += 1;
@@ -16,6 +16,9 @@ expand_attributes_wrong_form =
expand_cannot_be_name_of_macro =
`{$trait_ident}` cannot be a name of {$macro_type} macro

expand_collapse_debuginfo_illegal =
illegal value for attribute #[collapse_debuginfo(no|external|yes)]

expand_count_repetition_misplaced =
`count` can not be placed inside the inner-most repetition

@@ -1,5 +1,6 @@
#![deny(rustc::untranslatable_diagnostic)]

use crate::base::ast::NestedMetaItem;
use crate::errors;
use crate::expand::{self, AstFragment, Invocation};
use crate::module::DirOwnership;
@@ -19,6 +20,7 @@ use rustc_feature::Features;
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics, RegisteredTools};
use rustc_parse::{parser, MACRO_ARGUMENTS};
use rustc_session::config::CollapseMacroDebuginfo;
use rustc_session::errors::report_lit_error;
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
@@ -664,8 +666,8 @@ pub enum SyntaxExtensionKind {
/// A token-based attribute macro.
Attr(
/// An expander with signature (TokenStream, TokenStream) -> TokenStream.
/// The first TokenSteam is the attribute itself, the second is the annotated item.
/// The produced TokenSteam replaces the input TokenSteam.
/// The first TokenStream is the attribute itself, the second is the annotated item.
/// The produced TokenStream replaces the input TokenStream.
Box<dyn AttrProcMacro + sync::DynSync + sync::DynSend>,
),

@@ -685,7 +687,7 @@ pub enum SyntaxExtensionKind {
/// A token-based derive macro.
Derive(
/// An expander with signature TokenStream -> TokenStream.
/// The produced TokenSteam is appended to the input TokenSteam.
/// The produced TokenStream is appended to the input TokenStream.
///
/// FIXME: The text above describes how this should work. Currently it
/// is handled identically to `LegacyDerive`. It should be migrated to
@@ -761,6 +763,55 @@ impl SyntaxExtension {
}
}

fn collapse_debuginfo_by_name(sess: &Session, attr: &Attribute) -> CollapseMacroDebuginfo {
use crate::errors::CollapseMacroDebuginfoIllegal;
// #[collapse_debuginfo] without enum value (#[collapse_debuginfo(no/external/yes)])
// considered as `yes`
attr.meta_item_list().map_or(CollapseMacroDebuginfo::Yes, |l| {
let [NestedMetaItem::MetaItem(item)] = &l[..] else {
sess.dcx().emit_err(CollapseMacroDebuginfoIllegal { span: attr.span });
return CollapseMacroDebuginfo::Unspecified;
};
if !item.is_word() {
sess.dcx().emit_err(CollapseMacroDebuginfoIllegal { span: item.span });
CollapseMacroDebuginfo::Unspecified
} else {
match item.name_or_empty() {
sym::no => CollapseMacroDebuginfo::No,
sym::external => CollapseMacroDebuginfo::External,
sym::yes => CollapseMacroDebuginfo::Yes,
_ => {
sess.dcx().emit_err(CollapseMacroDebuginfoIllegal { span: item.span });
CollapseMacroDebuginfo::Unspecified
}
}
}
})
}

/// if-ext - if macro from different crate (related to callsite code)
/// | cmd \ attr | no | (unspecified) | external | yes |
/// | no | no | no | no | no |
/// | (unspecified) | no | no | if-ext | yes |
/// | external | no | if-ext | if-ext | yes |
/// | yes | yes | yes | yes | yes |
fn get_collapse_debuginfo(sess: &Session, attrs: &[ast::Attribute], is_local: bool) -> bool {
let collapse_debuginfo_attr = attr::find_by_name(attrs, sym::collapse_debuginfo)
.map(|v| Self::collapse_debuginfo_by_name(sess, v))
.unwrap_or(CollapseMacroDebuginfo::Unspecified);
let flag = sess.opts.unstable_opts.collapse_macro_debuginfo;
let attr = collapse_debuginfo_attr;
let ext = !is_local;
#[rustfmt::skip]
let collapse_table = [
[false, false, false, false],
[false, false, ext, true],
[false, ext, ext, true],
[true, true, true, true],
];
collapse_table[flag as usize][attr as usize]
}

/// Constructs a syntax extension with the given properties
/// and other properties converted from attributes.
pub fn new(
@@ -772,6 +823,7 @@ impl SyntaxExtension {
edition: Edition,
name: Symbol,
attrs: &[ast::Attribute],
is_local: bool,
) -> SyntaxExtension {
let allow_internal_unstable =
attr::allow_internal_unstable(sess, attrs).collect::<Vec<Symbol>>();
@@ -780,8 +832,8 @@ impl SyntaxExtension {
let local_inner_macros = attr::find_by_name(attrs, sym::macro_export)
.and_then(|macro_export| macro_export.meta_item_list())
.is_some_and(|l| attr::list_contains_name(&l, sym::local_inner_macros));
let collapse_debuginfo = attr::contains_name(attrs, sym::collapse_debuginfo);
tracing::debug!(?local_inner_macros, ?collapse_debuginfo, ?allow_internal_unsafe);
let collapse_debuginfo = Self::get_collapse_debuginfo(sess, attrs, is_local);
tracing::debug!(?name, ?local_inner_macros, ?collapse_debuginfo, ?allow_internal_unsafe);

let (builtin_name, helper_attrs) = attr::find_by_name(attrs, sym::rustc_builtin_macro)
.map(|attr| {
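The decision table in get_collapse_debuginfo above reads with rows as the collapse_macro_debuginfo session option (the -Z flag) and columns as the attribute value. A self-contained sketch of that lookup, under the assumption that both axes are ordered no / unspecified / external / yes as in the doc comment:

    #[allow(dead_code)]
    #[derive(Clone, Copy)]
    enum Setting { No, Unspecified, External, Yes }

    fn collapse(flag: Setting, attr: Setting, is_local: bool) -> bool {
        let ext = !is_local; // "if-ext": collapse only if the macro comes from another crate
        let table = [
            [false, false, false, false],
            [false, false, ext, true],
            [false, ext, ext, true],
            [true, true, true, true],
        ];
        table[flag as usize][attr as usize]
    }

    fn main() {
        // Command line unspecified, attribute `external`, macro defined in another crate:
        assert!(collapse(Setting::Unspecified, Setting::External, false));
        // Same attribute on a local macro: debuginfo is not collapsed.
        assert!(!collapse(Setting::Unspecified, Setting::External, true));
    }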
@@ -58,6 +58,13 @@ pub(crate) struct ResolveRelativePath {
pub path: String,
}

#[derive(Diagnostic)]
#[diag(expand_collapse_debuginfo_illegal)]
pub(crate) struct CollapseMacroDebuginfoIllegal {
#[primary_span]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(expand_macro_const_stability)]
pub(crate) struct MacroConstStability {
@@ -367,6 +367,7 @@ pub fn compile_declarative_macro(
edition,
def.ident.name,
&def.attrs,
def.id != DUMMY_NODE_ID,
)
};
let dummy_syn_ext = || (mk_syn_ext(Box::new(macro_rules_dummy_expander)), Vec::new());
@@ -469,7 +469,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[

// `#[collapse_debuginfo]`
gated!(
collapse_debuginfo, Normal, template!(Word), WarnFollowing,
collapse_debuginfo, Normal, template!(Word, List: "no|external|yes"), ErrorFollowing,
experimental!(collapse_debuginfo)
),

@@ -549,7 +549,9 @@ declare_features! (
/// casts in safe Rust to `dyn Trait` for such a `Trait` is also forbidden.
(unstable, object_safe_for_dispatch, "1.40.0", Some(43561)),
/// Allows using enums in offset_of!
(unstable, offset_of_enum, "1.75.0", Some(106655)),
(unstable, offset_of_enum, "1.75.0", Some(120141)),
/// Allows using multiple nested field accesses in offset_of!
(unstable, offset_of_nested, "CURRENT_RUSTC_VERSION", Some(120140)),
/// Allows using `#[optimize(X)]`.
(unstable, optimize_attribute, "1.34.0", Some(54882)),
/// Allows macro attributes on expressions, statements and non-inline modules.
@ -248,7 +248,7 @@ pub struct InferArg {
|
|||
}
|
||||
|
||||
impl InferArg {
|
||||
pub fn to_ty(&self) -> Ty<'_> {
|
||||
pub fn to_ty(&self) -> Ty<'static> {
|
||||
Ty { kind: TyKind::Infer, span: self.span, hir_id: self.hir_id }
|
||||
}
|
||||
}
|
||||
|
@ -841,7 +841,7 @@ pub struct OwnerNodes<'tcx> {
|
|||
}
|
||||
|
||||
impl<'tcx> OwnerNodes<'tcx> {
|
||||
fn node(&self) -> OwnerNode<'tcx> {
|
||||
pub fn node(&self) -> OwnerNode<'tcx> {
|
||||
use rustc_index::Idx;
|
||||
let node = self.nodes[ItemLocalId::new(0)].as_ref().unwrap().node;
|
||||
let node = node.as_owner().unwrap(); // Indexing must ensure it is an OwnerNode.
|
||||
|
@ -1015,7 +1015,7 @@ impl<'hir> Pat<'hir> {
|
|||
|
||||
use PatKind::*;
|
||||
match self.kind {
|
||||
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) => true,
|
||||
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => true,
|
||||
Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it),
|
||||
Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk_short_(it)),
|
||||
TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().all(|p| p.walk_short_(it)),
|
||||
|
@ -1042,7 +1042,7 @@ impl<'hir> Pat<'hir> {
|
|||
|
||||
use PatKind::*;
|
||||
match self.kind {
|
||||
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) => {}
|
||||
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => {}
|
||||
Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it),
|
||||
Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk_(it)),
|
||||
TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().for_each(|p| p.walk_(it)),
|
||||
|
@ -1205,6 +1205,9 @@ pub enum PatKind<'hir> {
|
|||
/// PatKind::Slice([Binding(a), Binding(b)], Some(Wild), [Binding(c), Binding(d)])
|
||||
/// ```
|
||||
Slice(&'hir [Pat<'hir>], Option<&'hir Pat<'hir>>, &'hir [Pat<'hir>]),
|
||||
|
||||
/// A placeholder for a pattern that wasn't well formed in some way.
|
||||
Err(ErrorGuaranteed),
|
||||
}
|
||||
|
||||
/// A statement.
|
||||
|
@ -1302,7 +1305,7 @@ pub enum UnsafeSource {
|
|||
UserProvided,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
|
||||
pub struct BodyId {
|
||||
pub hir_id: HirId,
|
||||
}
|
||||
|
|
|
@ -655,7 +655,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) {
|
|||
walk_list!(visitor, visit_expr, lower_bound);
|
||||
walk_list!(visitor, visit_expr, upper_bound);
|
||||
}
|
||||
PatKind::Never | PatKind::Wild => (),
|
||||
PatKind::Never | PatKind::Wild | PatKind::Err(_) => (),
|
||||
PatKind::Slice(prepatterns, ref slice_pattern, postpatterns) => {
|
||||
walk_list!(visitor, visit_pat, prepatterns);
|
||||
walk_list!(visitor, visit_pat, slice_pattern);
|
||||
|
|
|
@ -374,3 +374,30 @@ pub static OPERATORS: &'static [LangItem] = &[
|
|||
LangItem::PartialEq,
|
||||
LangItem::PartialOrd,
|
||||
];
|
||||
|
||||
pub static BINARY_OPERATORS: &'static [LangItem] = &[
|
||||
LangItem::Add,
|
||||
LangItem::Sub,
|
||||
LangItem::Mul,
|
||||
LangItem::Div,
|
||||
LangItem::Rem,
|
||||
LangItem::BitXor,
|
||||
LangItem::BitAnd,
|
||||
LangItem::BitOr,
|
||||
LangItem::Shl,
|
||||
LangItem::Shr,
|
||||
LangItem::AddAssign,
|
||||
LangItem::SubAssign,
|
||||
LangItem::MulAssign,
|
||||
LangItem::DivAssign,
|
||||
LangItem::RemAssign,
|
||||
LangItem::BitXorAssign,
|
||||
LangItem::BitAndAssign,
|
||||
LangItem::BitOrAssign,
|
||||
LangItem::ShlAssign,
|
||||
LangItem::ShrAssign,
|
||||
LangItem::Index,
|
||||
LangItem::IndexMut,
|
||||
LangItem::PartialEq,
|
||||
LangItem::PartialOrd,
|
||||
];
|
||||
|
|
|
@ -12,7 +12,6 @@ use rustc_span::def_id::DefPathHash;
|
|||
pub trait HashStableContext:
|
||||
rustc_ast::HashStableContext + rustc_target::HashStableContext
|
||||
{
|
||||
fn hash_body_id(&mut self, _: BodyId, hasher: &mut StableHasher);
|
||||
}
|
||||
|
||||
impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for HirId {
|
||||
|
@ -80,12 +79,6 @@ impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for ForeignItemId
|
|||
}
|
||||
}
|
||||
|
||||
impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for BodyId {
|
||||
fn hash_stable(&self, hcx: &mut HirCtx, hasher: &mut StableHasher) {
|
||||
hcx.hash_body_id(*self, hasher)
|
||||
}
|
||||
}
|
||||
|
||||
// The following implementations of HashStable for `ItemId`, `TraitItemId`, and
|
||||
// `ImplItemId` deserve special attention. Normally we do not hash `NodeId`s within
|
||||
// the HIR, since they just signify a HIR nodes own path. But `ItemId` et al
|
||||
|
|
|
@ -9,6 +9,7 @@ doctest = false
|
|||
|
||||
[dependencies]
|
||||
# tidy-alphabetical-start
|
||||
itertools = "0.11"
|
||||
rustc_arena = { path = "../rustc_arena" }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_attr = { path = "../rustc_attr" }
|
||||
|
|
|
@ -108,14 +108,16 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
|
|||
/// `param_ty` and `ast_bounds`. See `instantiate_poly_trait_ref`
|
||||
/// for more details.
|
||||
#[instrument(level = "debug", skip(self, ast_bounds, bounds))]
|
||||
pub(crate) fn add_bounds<'hir, I: Iterator<Item = &'hir hir::GenericBound<'hir>>>(
|
||||
pub(crate) fn add_bounds<'hir, I: Iterator<Item = &'hir hir::GenericBound<'tcx>>>(
|
||||
&self,
|
||||
param_ty: Ty<'tcx>,
|
||||
ast_bounds: I,
|
||||
bounds: &mut Bounds<'tcx>,
|
||||
bound_vars: &'tcx ty::List<ty::BoundVariableKind>,
|
||||
only_self_bounds: OnlySelfBounds,
|
||||
) {
|
||||
) where
|
||||
'tcx: 'hir,
|
||||
{
|
||||
for ast_bound in ast_bounds {
|
||||
match ast_bound {
|
||||
hir::GenericBound::Trait(poly_trait_ref, modifier) => {
|
||||
|
@ -179,7 +181,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
|
|||
pub(crate) fn compute_bounds(
|
||||
&self,
|
||||
param_ty: Ty<'tcx>,
|
||||
ast_bounds: &[hir::GenericBound<'_>],
|
||||
ast_bounds: &[hir::GenericBound<'tcx>],
|
||||
filter: PredicateFilter,
|
||||
) -> Bounds<'tcx> {
|
||||
let mut bounds = Bounds::default();
|
||||
|
|
|
@ -168,7 +168,7 @@ fn generic_arg_mismatch_err(
|
|||
/// instantiate a `GenericArg`.
|
||||
/// - `inferred_kind`: if no parameter was provided, and inference is enabled, then
|
||||
/// creates a suitable inference variable.
|
||||
pub fn create_args_for_parent_generic_args<'tcx, 'a>(
|
||||
pub fn create_args_for_parent_generic_args<'tcx: 'a, 'a>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
def_id: DefId,
|
||||
parent_args: &[ty::GenericArg<'tcx>],
|
||||
|
|
|
@ -78,14 +78,17 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
fn maybe_lint_impl_trait(&self, self_ty: &hir::Ty<'_>, diag: &mut Diagnostic) -> bool {
|
||||
let tcx = self.tcx();
|
||||
let parent_id = tcx.hir().get_parent_item(self_ty.hir_id).def_id;
|
||||
let (hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig, generics, _), .. })
|
||||
| hir::Node::TraitItem(hir::TraitItem {
|
||||
kind: hir::TraitItemKind::Fn(sig, _),
|
||||
generics,
|
||||
..
|
||||
})) = tcx.hir_node_by_def_id(parent_id)
|
||||
else {
|
||||
return false;
|
||||
let (sig, generics, owner) = match tcx.hir_node_by_def_id(parent_id) {
|
||||
hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(sig, generics, _), .. }) => {
|
||||
(sig, generics, None)
|
||||
}
|
||||
hir::Node::TraitItem(hir::TraitItem {
|
||||
kind: hir::TraitItemKind::Fn(sig, _),
|
||||
generics,
|
||||
owner_id,
|
||||
..
|
||||
}) => (sig, generics, Some(tcx.parent(owner_id.to_def_id()))),
|
||||
_ => return false,
|
||||
};
|
||||
let Ok(trait_name) = tcx.sess.source_map().span_to_snippet(self_ty.span) else {
|
||||
return false;
|
||||
|
@ -94,6 +97,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
let is_object_safe = match self_ty.kind {
|
||||
hir::TyKind::TraitObject(objects, ..) => {
|
||||
objects.iter().all(|o| match o.trait_ref.path.res {
|
||||
Res::Def(DefKind::Trait, id) if Some(id) == owner => {
|
||||
// When we're dealing with a recursive trait, we don't want to downgrade
|
||||
// the error, so we consider them to be object safe always. (#119652)
|
||||
true
|
||||
}
|
||||
Res::Def(DefKind::Trait, id) => tcx.check_is_object_safe(id),
|
||||
_ => false,
|
||||
})
|
||||
|
@ -122,7 +130,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
],
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
} else {
|
||||
} else if diag.is_error() {
|
||||
// We'll emit the object safety error already, with a structured suggestion.
|
||||
diag.downgrade_to_delayed_bug();
|
||||
}
|
||||
|
@ -148,8 +156,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
}
|
||||
if !is_object_safe {
|
||||
diag.note(format!("`{trait_name}` it is not object safe, so it can't be `dyn`"));
|
||||
// We'll emit the object safety error already, with a structured suggestion.
|
||||
diag.downgrade_to_delayed_bug();
|
||||
if diag.is_error() {
|
||||
// We'll emit the object safety error already, with a structured suggestion.
|
||||
diag.downgrade_to_delayed_bug();
|
||||
}
|
||||
} else {
|
||||
let sugg = if let hir::TyKind::TraitObject([_, _, ..], _, _) = self_ty.kind {
|
||||
// There are more than one trait bound, we need surrounding parentheses.
|
||||
|
|
|
@ -122,7 +122,7 @@ pub trait AstConv<'tcx> {
|
|||
&self,
|
||||
span: Span,
|
||||
item_def_id: DefId,
|
||||
item_segment: &hir::PathSegment<'_>,
|
||||
item_segment: &hir::PathSegment<'tcx>,
|
||||
poly_trait_ref: ty::PolyTraitRef<'tcx>,
|
||||
) -> Ty<'tcx>;
|
||||
|
||||
|
@ -156,14 +156,14 @@ struct ConvertedBinding<'a, 'tcx> {
|
|||
hir_id: hir::HirId,
|
||||
item_name: Ident,
|
||||
kind: ConvertedBindingKind<'a, 'tcx>,
|
||||
gen_args: &'a GenericArgs<'a>,
|
||||
gen_args: &'tcx GenericArgs<'tcx>,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ConvertedBindingKind<'a, 'tcx> {
|
||||
Equality(Spanned<ty::Term<'tcx>>),
|
||||
Constraint(&'a [hir::GenericBound<'a>]),
|
||||
Constraint(&'a [hir::GenericBound<'tcx>]),
|
||||
}
|
||||
|
||||
/// New-typed boolean indicating whether explicit late-bound lifetimes
|
||||
|
@ -215,12 +215,12 @@ pub struct GenericArgCountResult {
|
|||
}
|
||||
|
||||
pub trait CreateSubstsForGenericArgsCtxt<'a, 'tcx> {
|
||||
fn args_for_def_id(&mut self, def_id: DefId) -> (Option<&'a GenericArgs<'a>>, bool);
|
||||
fn args_for_def_id(&mut self, def_id: DefId) -> (Option<&'a GenericArgs<'tcx>>, bool);
|
||||
|
||||
fn provided_kind(
|
||||
&mut self,
|
||||
param: &ty::GenericParamDef,
|
||||
arg: &GenericArg<'_>,
|
||||
arg: &GenericArg<'tcx>,
|
||||
) -> ty::GenericArg<'tcx>;
|
||||
|
||||
fn inferred_kind(
|
||||
|
@ -294,7 +294,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
&self,
|
||||
span: Span,
|
||||
def_id: DefId,
|
||||
item_segment: &hir::PathSegment<'_>,
|
||||
item_segment: &hir::PathSegment<'tcx>,
|
||||
) -> GenericArgsRef<'tcx> {
|
||||
let (args, _) = self.create_args_for_ast_path(
|
||||
span,
|
||||
|
@ -351,7 +351,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
def_id: DefId,
|
||||
parent_args: &[ty::GenericArg<'tcx>],
|
||||
seg: &hir::PathSegment<'_>,
|
||||
generic_args: &'a hir::GenericArgs<'_>,
|
||||
generic_args: &'a hir::GenericArgs<'tcx>,
|
||||
infer_args: bool,
|
||||
self_ty: Option<Ty<'tcx>>,
|
||||
constness: ty::BoundConstness,
|
||||
|
@ -406,14 +406,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
struct SubstsForAstPathCtxt<'a, 'tcx> {
|
||||
astconv: &'a (dyn AstConv<'tcx> + 'a),
|
||||
def_id: DefId,
|
||||
generic_args: &'a GenericArgs<'a>,
|
||||
generic_args: &'a GenericArgs<'tcx>,
|
||||
span: Span,
|
||||
inferred_params: Vec<Span>,
|
||||
infer_args: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> CreateSubstsForGenericArgsCtxt<'a, 'tcx> for SubstsForAstPathCtxt<'a, 'tcx> {
|
||||
fn args_for_def_id(&mut self, did: DefId) -> (Option<&'a GenericArgs<'a>>, bool) {
|
||||
fn args_for_def_id(&mut self, did: DefId) -> (Option<&'a GenericArgs<'tcx>>, bool) {
|
||||
if did == self.def_id {
|
||||
(Some(self.generic_args), self.infer_args)
|
||||
} else {
|
||||
|
@ -425,11 +425,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
fn provided_kind(
|
||||
&mut self,
|
||||
param: &ty::GenericParamDef,
|
||||
arg: &GenericArg<'_>,
|
||||
arg: &GenericArg<'tcx>,
|
||||
) -> ty::GenericArg<'tcx> {
|
||||
let tcx = self.astconv.tcx();
|
||||
|
||||
let mut handle_ty_args = |has_default, ty: &hir::Ty<'_>| {
|
||||
let mut handle_ty_args = |has_default, ty: &hir::Ty<'tcx>| {
|
||||
if has_default {
|
||||
tcx.check_optional_stability(
|
||||
param.def_id,
|
||||
|
@ -592,7 +592,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
|
||||
fn create_assoc_bindings_for_generic_args<'a>(
|
||||
&self,
|
||||
generic_args: &'a hir::GenericArgs<'_>,
|
||||
generic_args: &'a hir::GenericArgs<'tcx>,
|
||||
) -> Vec<ConvertedBinding<'a, 'tcx>> {
|
||||
// Convert associated-type bindings or constraints into a separate vector.
|
||||
// Example: Given this:
|
||||
|
@ -640,7 +640,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
&self,
|
||||
span: Span,
|
||||
item_def_id: DefId,
|
||||
item_segment: &hir::PathSegment<'_>,
|
||||
item_segment: &hir::PathSegment<'tcx>,
|
||||
parent_args: GenericArgsRef<'tcx>,
|
||||
) -> GenericArgsRef<'tcx> {
|
||||
debug!(
|
||||
|
@ -673,7 +673,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
/// are disallowed. Otherwise, they are pushed onto the vector given.
|
||||
pub fn instantiate_mono_trait_ref(
|
||||
&self,
|
||||
trait_ref: &hir::TraitRef<'_>,
|
||||
trait_ref: &hir::TraitRef<'tcx>,
|
||||
self_ty: Ty<'tcx>,
|
||||
) -> ty::TraitRef<'tcx> {
|
||||
self.prohibit_generics(trait_ref.path.segments.split_last().unwrap().1.iter(), |_| {});
|
||||
|
@ -710,7 +710,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
#[instrument(level = "debug", skip(self, span, constness, bounds, speculative))]
|
||||
pub(crate) fn instantiate_poly_trait_ref(
|
||||
&self,
|
||||
trait_ref: &hir::TraitRef<'_>,
|
||||
trait_ref: &hir::TraitRef<'tcx>,
|
||||
span: Span,
|
||||
constness: ty::BoundConstness,
|
||||
polarity: ty::ImplPolarity,
|
||||
|
@ -788,7 +788,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
span: Span,
|
||||
trait_def_id: DefId,
|
||||
self_ty: Ty<'tcx>,
|
||||
trait_segment: &hir::PathSegment<'_>,
|
||||
trait_segment: &hir::PathSegment<'tcx>,
|
||||
is_impl: bool,
|
||||
// FIXME(effects) move all host param things in astconv to hir lowering
|
||||
constness: ty::BoundConstness,
|
||||
|
@ -813,7 +813,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
span: Span,
|
||||
trait_def_id: DefId,
|
||||
self_ty: Ty<'tcx>,
|
||||
trait_segment: &'a hir::PathSegment<'a>,
|
||||
trait_segment: &'a hir::PathSegment<'tcx>,
|
||||
is_impl: bool,
|
||||
constness: ty::BoundConstness,
|
||||
) -> (GenericArgsRef<'tcx>, GenericArgCountResult) {
|
||||
|
@ -847,7 +847,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
&self,
|
||||
span: Span,
|
||||
did: DefId,
|
||||
item_segment: &hir::PathSegment<'_>,
|
||||
item_segment: &hir::PathSegment<'tcx>,
|
||||
) -> Ty<'tcx> {
|
||||
let tcx = self.tcx();
|
||||
let args = self.ast_path_args_for_ty(span, did, item_segment);
|
||||
|
@ -1153,7 +1153,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
span: Span,
|
||||
qself_ty: Ty<'tcx>,
|
||||
qself: &hir::Ty<'_>,
|
||||
assoc_segment: &hir::PathSegment<'_>,
|
||||
assoc_segment: &hir::PathSegment<'tcx>,
|
||||
permit_variants: bool,
|
||||
) -> Result<(Ty<'tcx>, DefKind, DefId), ErrorGuaranteed> {
|
||||
let tcx = self.tcx();
|
||||
|
@ -1428,7 +1428,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
fn lookup_inherent_assoc_ty(
|
||||
&self,
|
||||
name: Ident,
|
||||
segment: &hir::PathSegment<'_>,
|
||||
segment: &hir::PathSegment<'tcx>,
|
||||
adt_did: DefId,
|
||||
self_ty: Ty<'tcx>,
|
||||
block: hir::HirId,
|
||||
|
@ -1446,7 +1446,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
}
|
||||
|
||||
let candidates: Vec<_> = tcx
|
||||
.inherent_impls(adt_did)
|
||||
.inherent_impls(adt_did)?
|
||||
.iter()
|
||||
.filter_map(|&impl_| Some((impl_, self.lookup_assoc_ty_unchecked(name, block, impl_)?)))
|
||||
.collect();
|
||||
|
@ -1702,8 +1702,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
span: Span,
|
||||
opt_self_ty: Option<Ty<'tcx>>,
|
||||
item_def_id: DefId,
|
||||
trait_segment: &hir::PathSegment<'_>,
|
||||
item_segment: &hir::PathSegment<'_>,
|
||||
trait_segment: &hir::PathSegment<'tcx>,
|
||||
item_segment: &hir::PathSegment<'tcx>,
|
||||
constness: ty::BoundConstness,
|
||||
) -> Ty<'tcx> {
|
||||
let tcx = self.tcx();
|
||||
|
@ -2021,7 +2021,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
pub fn res_to_ty(
|
||||
&self,
|
||||
opt_self_ty: Option<Ty<'tcx>>,
|
||||
path: &hir::Path<'_>,
|
||||
path: &hir::Path<'tcx>,
|
||||
hir_id: hir::HirId,
|
||||
permit_variants: bool,
|
||||
) -> Ty<'tcx> {
|
||||
|
@ -2311,13 +2311,13 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
|
||||
/// Parses the programmer's textual representation of a type into our
|
||||
/// internal notion of a type.
|
||||
pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
|
||||
pub fn ast_ty_to_ty(&self, ast_ty: &hir::Ty<'tcx>) -> Ty<'tcx> {
|
||||
self.ast_ty_to_ty_inner(ast_ty, false, false)
|
||||
}
|
||||
|
||||
/// Parses the programmer's textual representation of a type into our
|
||||
/// internal notion of a type. This is meant to be used within a path.
|
||||
pub fn ast_ty_to_ty_in_path(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
|
||||
pub fn ast_ty_to_ty_in_path(&self, ast_ty: &hir::Ty<'tcx>) -> Ty<'tcx> {
|
||||
self.ast_ty_to_ty_inner(ast_ty, false, true)
|
||||
}
|
||||
|
||||
|
@ -2432,7 +2432,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
/// Turns a `hir::Ty` into a `Ty`. For diagnostics' purposes we keep track of whether trait
|
||||
/// objects are borrowed like `&dyn Trait` to avoid emitting redundant errors.
|
||||
#[instrument(level = "debug", skip(self), ret)]
|
||||
fn ast_ty_to_ty_inner(&self, ast_ty: &hir::Ty<'_>, borrowed: bool, in_path: bool) -> Ty<'tcx> {
|
||||
fn ast_ty_to_ty_inner(
|
||||
&self,
|
||||
ast_ty: &hir::Ty<'tcx>,
|
||||
borrowed: bool,
|
||||
in_path: bool,
|
||||
) -> Ty<'tcx> {
|
||||
let tcx = self.tcx();
|
||||
|
||||
let result_ty = match &ast_ty.kind {
|
||||
|
@ -2609,7 +2614,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn ty_of_arg(&self, ty: &hir::Ty<'_>, expected_ty: Option<Ty<'tcx>>) -> Ty<'tcx> {
|
||||
pub fn ty_of_arg(&self, ty: &hir::Ty<'tcx>, expected_ty: Option<Ty<'tcx>>) -> Ty<'tcx> {
|
||||
match ty.kind {
|
||||
hir::TyKind::Infer if expected_ty.is_some() => {
|
||||
self.record_ty(ty.hir_id, expected_ty.unwrap(), ty.span);
|
||||
|
@ -2625,7 +2630,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
hir_id: hir::HirId,
|
||||
unsafety: hir::Unsafety,
|
||||
abi: abi::Abi,
|
||||
decl: &hir::FnDecl<'_>,
|
||||
decl: &hir::FnDecl<'tcx>,
|
||||
generics: Option<&hir::Generics<'_>>,
|
||||
hir_ty: Option<&hir::Ty<'_>>,
|
||||
) -> ty::PolyFnSig<'tcx> {
|
||||
|
|
|
@ -22,7 +22,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
|||
&self,
|
||||
span: Span,
|
||||
hir_id: hir::HirId,
|
||||
hir_trait_bounds: &[hir::PolyTraitRef<'_>],
|
||||
hir_trait_bounds: &[hir::PolyTraitRef<'tcx>],
|
||||
lifetime: &hir::Lifetime,
|
||||
borrowed: bool,
|
||||
representation: DynKind,
|
||||
|
|
|
@ -368,7 +368,7 @@ fn check_opaque_meets_bounds<'tcx>(
|
|||
// Can have different predicates to their defining use
|
||||
hir::OpaqueTyOrigin::TyAlias { .. } => {
|
||||
let wf_tys = ocx.assumed_wf_types_and_report_errors(param_env, def_id)?;
|
||||
let implied_bounds = infcx.implied_bounds_tys(param_env, def_id, wf_tys);
|
||||
let implied_bounds = infcx.implied_bounds_tys(param_env, def_id, &wf_tys);
|
||||
let outlives_env = OutlivesEnvironment::with_bounds(param_env, implied_bounds);
|
||||
ocx.resolve_regions_and_report_errors(defining_use_anchor, &outlives_env)?;
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ use super::potentially_plural_count;
|
|||
use crate::errors::LifetimesOrBoundsMismatchOnTrait;
|
||||
use hir::def_id::{DefId, DefIdMap, LocalDefId};
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
|
||||
use rustc_errors::{pluralize, struct_span_code_err, Applicability, DiagnosticId, ErrorGuaranteed};
|
||||
use rustc_errors::{pluralize, struct_span_code_err, Applicability, ErrorGuaranteed};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir::intravisit;
|
||||
|
@ -378,7 +378,7 @@ fn compare_method_predicate_entailment<'tcx>(
|
|||
// lifetime parameters.
|
||||
let outlives_env = OutlivesEnvironment::with_bounds(
|
||||
param_env,
|
||||
infcx.implied_bounds_tys(param_env, impl_m_def_id, wf_tys),
|
||||
infcx.implied_bounds_tys(param_env, impl_m_def_id, &wf_tys),
|
||||
);
|
||||
let errors = infcx.resolve_regions(&outlives_env);
|
||||
if !errors.is_empty() {
|
||||
|
@ -702,7 +702,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
|
|||
// lifetime parameters.
|
||||
let outlives_env = OutlivesEnvironment::with_bounds(
|
||||
param_env,
|
||||
infcx.implied_bounds_tys(param_env, impl_m_def_id, wf_tys),
|
||||
infcx.implied_bounds_tys(param_env, impl_m_def_id, &wf_tys),
|
||||
);
|
||||
ocx.resolve_regions_and_report_errors(impl_m_def_id, &outlives_env)?;
|
||||
|
||||
|
@ -1382,7 +1382,7 @@ fn compare_number_of_generics<'tcx>(
|
|||
kind = kind,
|
||||
),
|
||||
);
|
||||
err.code(DiagnosticId::Error("E0049".into()));
|
||||
err.code("E0049".into());
|
||||
|
||||
let msg =
|
||||
format!("expected {trait_count} {kind} parameter{}", pluralize!(trait_count),);
|
||||
|
@ -2070,7 +2070,7 @@ pub(super) fn check_type_bounds<'tcx>(
|
|||
|
||||
// Finally, resolve all regions. This catches wily misuses of
|
||||
// lifetime parameters.
|
||||
let implied_bounds = infcx.implied_bounds_tys(param_env, impl_ty_def_id, assumed_wf_types);
|
||||
let implied_bounds = infcx.implied_bounds_tys(param_env, impl_ty_def_id, &assumed_wf_types);
|
||||
let outlives_env = OutlivesEnvironment::with_bounds(param_env, implied_bounds);
|
||||
ocx.resolve_regions_and_report_errors(impl_ty_def_id, &outlives_env)
|
||||
}
|
||||
|
|
|
@ -158,7 +158,7 @@ pub(super) fn check_refining_return_position_impl_trait_in_trait<'tcx>(
|
|||
}
|
||||
let outlives_env = OutlivesEnvironment::with_bounds(
|
||||
param_env,
|
||||
infcx.implied_bounds_tys(param_env, impl_m.def_id.expect_local(), implied_wf_types),
|
||||
infcx.implied_bounds_tys(param_env, impl_m.def_id.expect_local(), &implied_wf_types),
|
||||
);
|
||||
let errors = infcx.resolve_regions(&outlives_env);
|
||||
if !errors.is_empty() {
|
||||
|
|
|
@ -681,7 +681,8 @@ fn resolve_local<'tcx>(
|
|||
| PatKind::Never
|
||||
| PatKind::Path(_)
|
||||
| PatKind::Lit(_)
|
||||
| PatKind::Range(_, _, _) => false,
|
||||
| PatKind::Range(_, _, _)
|
||||
| PatKind::Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@ use rustc_infer::infer::outlives::obligations::TypeOutlives;
|
|||
use rustc_infer::infer::{self, InferCtxt, TyCtxtInferExt};
|
||||
use rustc_middle::mir::ConstraintCategory;
|
||||
use rustc_middle::query::Providers;
|
||||
use rustc_middle::ty::print::with_no_trimmed_paths;
|
||||
use rustc_middle::ty::trait_def::TraitSpecializationKind;
|
||||
use rustc_middle::ty::{
|
||||
self, AdtKind, GenericParamDefKind, ToPredicate, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable,
|
||||
|
@ -112,8 +113,6 @@ where
|
|||
|
||||
let assumed_wf_types = wfcx.ocx.assumed_wf_types_and_report_errors(param_env, body_def_id)?;
|
||||
|
||||
let implied_bounds = infcx.implied_bounds_tys(param_env, body_def_id, assumed_wf_types);
|
||||
|
||||
let errors = wfcx.select_all_or_error();
|
||||
if !errors.is_empty() {
|
||||
let err = infcx.err_ctxt().report_fulfillment_errors(errors);
|
||||
|
@ -128,10 +127,65 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
debug!(?assumed_wf_types);
|
||||
|
||||
let infcx_compat = infcx.fork();
|
||||
|
||||
// We specifically want to call the non-compat version of `implied_bounds_tys`; we do this always.
|
||||
let implied_bounds =
|
||||
infcx.implied_bounds_tys_compat(param_env, body_def_id, &assumed_wf_types, false);
|
||||
let outlives_env = OutlivesEnvironment::with_bounds(param_env, implied_bounds);
|
||||
|
||||
wfcx.ocx.resolve_regions_and_report_errors(body_def_id, &outlives_env)?;
|
||||
infcx.tainted_by_errors().error_reported()
|
||||
let errors = infcx.resolve_regions(&outlives_env);
|
||||
if errors.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let is_bevy = 'is_bevy: {
|
||||
// We don't want to emit this for dependents of Bevy, for now.
|
||||
// See #119956
|
||||
let is_bevy_paramset = |def: ty::AdtDef<'_>| {
|
||||
let adt_did = with_no_trimmed_paths!(infcx.tcx.def_path_str(def.0.did));
|
||||
adt_did.contains("ParamSet")
|
||||
};
|
||||
for ty in assumed_wf_types.iter() {
|
||||
match ty.kind() {
|
||||
ty::Adt(def, _) => {
|
||||
if is_bevy_paramset(*def) {
|
||||
break 'is_bevy true;
|
||||
}
|
||||
}
|
||||
ty::Ref(_, ty, _) => match ty.kind() {
|
||||
ty::Adt(def, _) => {
|
||||
if is_bevy_paramset(*def) {
|
||||
break 'is_bevy true;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
false
|
||||
};
|
||||
|
||||
// If we have set `no_implied_bounds_compat`, then do not attempt compatibility.
|
||||
// We could also just always enter if `is_bevy`, and call `implied_bounds_tys`,
|
||||
// but that does result in slightly more work when this option is set and
|
||||
// just obscures what we mean here anyways. Let's just be explicit.
|
||||
if is_bevy && !infcx.tcx.sess.opts.unstable_opts.no_implied_bounds_compat {
|
||||
let implied_bounds =
|
||||
infcx_compat.implied_bounds_tys_compat(param_env, body_def_id, &assumed_wf_types, true);
|
||||
let outlives_env = OutlivesEnvironment::with_bounds(param_env, implied_bounds);
|
||||
let errors_compat = infcx_compat.resolve_regions(&outlives_env);
|
||||
if errors_compat.is_empty() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(infcx_compat.err_ctxt().report_region_errors(body_def_id, &errors_compat))
|
||||
}
|
||||
} else {
|
||||
Err(infcx.err_ctxt().report_region_errors(body_def_id, &errors))
|
||||
}
|
||||
}
|
||||
|
||||
fn check_well_formed(tcx: TyCtxt<'_>, def_id: hir::OwnerId) -> Result<(), ErrorGuaranteed> {
|
||||
|
@ -723,7 +777,7 @@ fn resolve_regions_with_wf_tys<'tcx>(
|
|||
let infcx = tcx.infer_ctxt().build();
|
||||
let outlives_environment = OutlivesEnvironment::with_bounds(
|
||||
param_env,
|
||||
infcx.implied_bounds_tys(param_env, id, wf_tys.clone()),
|
||||
infcx.implied_bounds_tys(param_env, id, wf_tys),
|
||||
);
|
||||
let region_bound_pairs = outlives_environment.region_bound_pairs();
|
||||
|
||||
|
@ -1736,7 +1790,7 @@ fn receiver_is_implemented<'tcx>(
|
|||
fn check_variances_for_type_defn<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
item: &hir::Item<'tcx>,
|
||||
hir_generics: &hir::Generics<'_>,
|
||||
hir_generics: &hir::Generics<'tcx>,
|
||||
) {
|
||||
let identity_args = ty::GenericArgs::identity_for_item(tcx, item.owner_id);
|
||||
|
||||
|
|
|
@ -13,32 +13,41 @@ use rustc_hir::def_id::{DefId, LocalDefId};
|
|||
use rustc_middle::ty::fast_reject::{simplify_type, SimplifiedType, TreatParams};
|
||||
use rustc_middle::ty::{self, CrateInherentImpls, Ty, TyCtxt};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_span::ErrorGuaranteed;
|
||||
|
||||
use crate::errors;
|
||||
|
||||
/// On-demand query: yields a map containing all types mapped to their inherent impls.
|
||||
pub fn crate_inherent_impls(tcx: TyCtxt<'_>, (): ()) -> CrateInherentImpls {
|
||||
pub fn crate_inherent_impls(
|
||||
tcx: TyCtxt<'_>,
|
||||
(): (),
|
||||
) -> Result<&'_ CrateInherentImpls, ErrorGuaranteed> {
|
||||
let mut collect = InherentCollect { tcx, impls_map: Default::default() };
|
||||
let mut res = Ok(());
|
||||
for id in tcx.hir().items() {
|
||||
collect.check_item(id);
|
||||
res = res.and(collect.check_item(id));
|
||||
}
|
||||
collect.impls_map
|
||||
res?;
|
||||
Ok(tcx.arena.alloc(collect.impls_map))
|
||||
}
|
||||
|
||||
pub fn crate_incoherent_impls(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] {
|
||||
let crate_map = tcx.crate_inherent_impls(());
|
||||
tcx.arena.alloc_from_iter(
|
||||
pub fn crate_incoherent_impls(
|
||||
tcx: TyCtxt<'_>,
|
||||
simp: SimplifiedType,
|
||||
) -> Result<&[DefId], ErrorGuaranteed> {
|
||||
let crate_map = tcx.crate_inherent_impls(())?;
|
||||
Ok(tcx.arena.alloc_from_iter(
|
||||
crate_map.incoherent_impls.get(&simp).unwrap_or(&Vec::new()).iter().map(|d| d.to_def_id()),
|
||||
)
|
||||
))
|
||||
}
|
||||
|
||||
/// On-demand query: yields a vector of the inherent impls for a specific type.
|
||||
pub fn inherent_impls(tcx: TyCtxt<'_>, ty_def_id: LocalDefId) -> &[DefId] {
|
||||
let crate_map = tcx.crate_inherent_impls(());
|
||||
match crate_map.inherent_impls.get(&ty_def_id) {
|
||||
pub fn inherent_impls(tcx: TyCtxt<'_>, ty_def_id: LocalDefId) -> Result<&[DefId], ErrorGuaranteed> {
|
||||
let crate_map = tcx.crate_inherent_impls(())?;
|
||||
Ok(match crate_map.inherent_impls.get(&ty_def_id) {
|
||||
Some(v) => &v[..],
|
||||
None => &[],
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
struct InherentCollect<'tcx> {
|
||||
|
@ -47,14 +56,19 @@ struct InherentCollect<'tcx> {
|
|||
}
|
||||
|
||||
impl<'tcx> InherentCollect<'tcx> {
|
||||
fn check_def_id(&mut self, impl_def_id: LocalDefId, self_ty: Ty<'tcx>, ty_def_id: DefId) {
|
||||
fn check_def_id(
|
||||
&mut self,
|
||||
impl_def_id: LocalDefId,
|
||||
self_ty: Ty<'tcx>,
|
||||
ty_def_id: DefId,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
if let Some(ty_def_id) = ty_def_id.as_local() {
|
||||
// Add the implementation to the mapping from implementation to base
|
||||
// type def ID, if there is a base type for this implementation and
|
||||
// the implementation does not have any associated traits.
|
||||
let vec = self.impls_map.inherent_impls.entry(ty_def_id).or_default();
|
||||
vec.push(impl_def_id.to_def_id());
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if self.tcx.features().rustc_attrs {
|
||||
|
@ -62,18 +76,16 @@ impl<'tcx> InherentCollect<'tcx> {
|
|||
|
||||
if !self.tcx.has_attr(ty_def_id, sym::rustc_has_incoherent_inherent_impls) {
|
||||
let impl_span = self.tcx.def_span(impl_def_id);
|
||||
self.tcx.dcx().emit_err(errors::InherentTyOutside { span: impl_span });
|
||||
return;
|
||||
return Err(self.tcx.dcx().emit_err(errors::InherentTyOutside { span: impl_span }));
|
||||
}
|
||||
|
||||
for &impl_item in items {
|
||||
if !self.tcx.has_attr(impl_item, sym::rustc_allow_incoherent_impl) {
|
||||
let impl_span = self.tcx.def_span(impl_def_id);
|
||||
self.tcx.dcx().emit_err(errors::InherentTyOutsideRelevant {
|
||||
return Err(self.tcx.dcx().emit_err(errors::InherentTyOutsideRelevant {
|
||||
span: impl_span,
|
||||
help_span: self.tcx.def_span(impl_item),
|
||||
});
|
||||
return;
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -82,24 +94,28 @@ impl<'tcx> InherentCollect<'tcx> {
|
|||
} else {
|
||||
bug!("unexpected self type: {:?}", self_ty);
|
||||
}
|
||||
Ok(())
|
||||
} else {
|
||||
let impl_span = self.tcx.def_span(impl_def_id);
|
||||
self.tcx.dcx().emit_err(errors::InherentTyOutsideNew { span: impl_span });
|
||||
Err(self.tcx.dcx().emit_err(errors::InherentTyOutsideNew { span: impl_span }))
|
||||
}
|
||||
}
|
||||
|
||||
fn check_primitive_impl(&mut self, impl_def_id: LocalDefId, ty: Ty<'tcx>) {
|
||||
fn check_primitive_impl(
|
||||
&mut self,
|
||||
impl_def_id: LocalDefId,
|
||||
ty: Ty<'tcx>,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
let items = self.tcx.associated_item_def_ids(impl_def_id);
|
||||
if !self.tcx.hir().rustc_coherence_is_core() {
|
||||
if self.tcx.features().rustc_attrs {
|
||||
for &impl_item in items {
|
||||
if !self.tcx.has_attr(impl_item, sym::rustc_allow_incoherent_impl) {
|
||||
let span = self.tcx.def_span(impl_def_id);
|
||||
self.tcx.dcx().emit_err(errors::InherentTyOutsidePrimitive {
|
||||
return Err(self.tcx.dcx().emit_err(errors::InherentTyOutsidePrimitive {
|
||||
span,
|
||||
help_span: self.tcx.def_span(impl_item),
|
||||
});
|
||||
return;
|
||||
}));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -108,8 +124,7 @@ impl<'tcx> InherentCollect<'tcx> {
|
|||
if let ty::Ref(_, subty, _) = ty.kind() {
|
||||
note = Some(errors::InherentPrimitiveTyNote { subty: *subty });
|
||||
}
|
||||
self.tcx.dcx().emit_err(errors::InherentPrimitiveTy { span, note });
|
||||
return;
|
||||
return Err(self.tcx.dcx().emit_err(errors::InherentPrimitiveTy { span, note }));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -118,11 +133,12 @@ impl<'tcx> InherentCollect<'tcx> {
|
|||
} else {
|
||||
bug!("unexpected primitive type: {:?}", ty);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_item(&mut self, id: hir::ItemId) {
|
||||
fn check_item(&mut self, id: hir::ItemId) -> Result<(), ErrorGuaranteed> {
|
||||
if !matches!(self.tcx.def_kind(id.owner_id), DefKind::Impl { of_trait: false }) {
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let id = id.owner_id.def_id;
|
||||
|
@ -132,10 +148,10 @@ impl<'tcx> InherentCollect<'tcx> {
|
|||
ty::Adt(def, _) => self.check_def_id(id, self_ty, def.did()),
|
||||
ty::Foreign(did) => self.check_def_id(id, self_ty, did),
|
||||
ty::Dynamic(data, ..) if data.principal_def_id().is_some() => {
|
||||
self.check_def_id(id, self_ty, data.principal_def_id().unwrap());
|
||||
self.check_def_id(id, self_ty, data.principal_def_id().unwrap())
|
||||
}
|
||||
ty::Dynamic(..) => {
|
||||
self.tcx.dcx().emit_err(errors::InherentDyn { span: item_span });
|
||||
Err(self.tcx.dcx().emit_err(errors::InherentDyn { span: item_span }))
|
||||
}
|
||||
ty::Bool
|
||||
| ty::Char
|
||||
|
@ -151,7 +167,7 @@ impl<'tcx> InherentCollect<'tcx> {
|
|||
| ty::FnPtr(_)
|
||||
| ty::Tuple(..) => self.check_primitive_impl(id, self_ty),
|
||||
ty::Alias(..) | ty::Param(_) => {
|
||||
self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span });
|
||||
Err(self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span }))
|
||||
}
|
||||
ty::FnDef(..)
|
||||
| ty::Closure(..)
|
||||
|
@ -162,7 +178,8 @@ impl<'tcx> InherentCollect<'tcx> {
|
|||
| ty::Infer(_) => {
|
||||
bug!("unexpected impl self type of impl: {:?} {:?}", id, self_ty);
}
ty::Error(_) => {}
// We could bail out here, but that will silence other useful errors.
ty::Error(_) => Ok(()),
}
}
}

@@ -6,16 +6,18 @@ use rustc_hir::def_id::DefId;
use rustc_index::IndexVec;
use rustc_middle::traits::specialization_graph::OverlapMode;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::Symbol;
use rustc_span::{ErrorGuaranteed, Symbol};
use rustc_trait_selection::traits::{self, SkipLeakCheck};
use smallvec::SmallVec;
use std::collections::hash_map::Entry;

pub fn crate_inherent_impls_overlap_check(tcx: TyCtxt<'_>, (): ()) {
pub fn crate_inherent_impls_overlap_check(tcx: TyCtxt<'_>, (): ()) -> Result<(), ErrorGuaranteed> {
let mut inherent_overlap_checker = InherentOverlapChecker { tcx };
let mut res = Ok(());
for id in tcx.hir().items() {
inherent_overlap_checker.check_item(id);
res = res.and(inherent_overlap_checker.check_item(id));
}
res
}

struct InherentOverlapChecker<'tcx> {

@@ -58,10 +60,11 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
== item2.ident(self.tcx).normalize_to_macros_2_0()
}

fn check_for_duplicate_items_in_impl(&self, impl_: DefId) {
fn check_for_duplicate_items_in_impl(&self, impl_: DefId) -> Result<(), ErrorGuaranteed> {
let impl_items = self.tcx.associated_items(impl_);

let mut seen_items = FxHashMap::default();
let mut res = Ok(());
for impl_item in impl_items.in_definition_order() {
let span = self.tcx.def_span(impl_item.def_id);
let ident = impl_item.ident(self.tcx);

@@ -70,7 +73,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
match seen_items.entry(norm_ident) {
Entry::Occupied(entry) => {
let former = entry.get();
struct_span_code_err!(
res = Err(struct_span_code_err!(
self.tcx.dcx(),
span,
E0592,

@@ -79,13 +82,14 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
)
.with_span_label(span, format!("duplicate definitions for `{ident}`"))
.with_span_label(*former, format!("other definition for `{ident}`"))
.emit();
.emit());
}
Entry::Vacant(entry) => {
entry.insert(span);
}
}
}
res
}

fn check_for_common_items_in_impls(

@@ -93,10 +97,11 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
impl1: DefId,
impl2: DefId,
overlap: traits::OverlapResult<'_>,
) {
) -> Result<(), ErrorGuaranteed> {
let impl_items1 = self.tcx.associated_items(impl1);
let impl_items2 = self.tcx.associated_items(impl2);

let mut res = Ok(());
for &item1 in impl_items1.in_definition_order() {
let collision = impl_items2
.filter_by_name_unhygienic(item1.name)

@@ -128,9 +133,10 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
traits::add_placeholder_note(&mut err);
}

err.emit();
res = Err(err.emit());
}
}
res
}

fn check_for_overlapping_inherent_impls(

@@ -138,7 +144,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
overlap_mode: OverlapMode,
impl1_def_id: DefId,
impl2_def_id: DefId,
) {
) -> Result<(), ErrorGuaranteed> {
let maybe_overlap = traits::overlapping_impls(
self.tcx,
impl1_def_id,

@@ -150,17 +156,19 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
);

if let Some(overlap) = maybe_overlap {
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id, overlap);
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id, overlap)
} else {
Ok(())
}
}

fn check_item(&mut self, id: hir::ItemId) {
fn check_item(&mut self, id: hir::ItemId) -> Result<(), ErrorGuaranteed> {
let def_kind = self.tcx.def_kind(id.owner_id);
if !matches!(def_kind, DefKind::Enum | DefKind::Struct | DefKind::Trait | DefKind::Union) {
return;
return Ok(());
}

let impls = self.tcx.inherent_impls(id.owner_id);
let impls = self.tcx.inherent_impls(id.owner_id)?;

let overlap_mode = OverlapMode::get(self.tcx, id.owner_id.to_def_id());

@@ -173,17 +181,18 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
// otherwise switch to an allocating algorithm with
// faster asymptotic runtime.
const ALLOCATING_ALGO_THRESHOLD: usize = 500;
let mut res = Ok(());
if impls.len() < ALLOCATING_ALGO_THRESHOLD {
for (i, &(&impl1_def_id, impl_items1)) in impls_items.iter().enumerate() {
self.check_for_duplicate_items_in_impl(impl1_def_id);
res = res.and(self.check_for_duplicate_items_in_impl(impl1_def_id));

for &(&impl2_def_id, impl_items2) in &impls_items[(i + 1)..] {
if self.impls_have_common_items(impl_items1, impl_items2) {
self.check_for_overlapping_inherent_impls(
res = res.and(self.check_for_overlapping_inherent_impls(
overlap_mode,
impl1_def_id,
impl2_def_id,
);
));
}
}
}

@@ -315,20 +324,21 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
impl_blocks.sort_unstable();
for (i, &impl1_items_idx) in impl_blocks.iter().enumerate() {
let &(&impl1_def_id, impl_items1) = &impls_items[impl1_items_idx];
self.check_for_duplicate_items_in_impl(impl1_def_id);
res = res.and(self.check_for_duplicate_items_in_impl(impl1_def_id));

for &impl2_items_idx in impl_blocks[(i + 1)..].iter() {
let &(&impl2_def_id, impl_items2) = &impls_items[impl2_items_idx];
if self.impls_have_common_items(impl_items1, impl_items2) {
self.check_for_overlapping_inherent_impls(
res = res.and(self.check_for_overlapping_inherent_impls(
overlap_mode,
impl1_def_id,
impl2_def_id,
);
));
}
}
}
}
}
res
}
}
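Note on the pattern in the hunks above: each check now returns Result<(), ErrorGuaranteed>, the Err value is whatever emit() returned, and callers fold the results with res = res.and(...) so every check still runs while the function as a whole reports failure if any diagnostic was emitted. A minimal, self-contained sketch of that shape; the Guaranteed and Diag types below are stand-ins, not rustc's real diagnostic API:

    // `Guaranteed` plays the role of rustc's `ErrorGuaranteed`: in this sketch it can only
    // be obtained by emitting a diagnostic, so `Err(Guaranteed)` doubles as proof that an
    // error was actually shown to the user.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Guaranteed(());

    struct Diag {
        msg: String,
    }

    impl Diag {
        fn new(msg: impl Into<String>) -> Self {
            Diag { msg: msg.into() }
        }
        fn emit(self) -> Guaranteed {
            eprintln!("error: {}", self.msg);
            Guaranteed(())
        }
    }

    fn check_no_duplicates(items: &[&str]) -> Result<(), Guaranteed> {
        let mut seen = std::collections::HashSet::new();
        let mut res = Ok(());
        for item in items {
            if !seen.insert(*item) {
                // Keep looping so every duplicate gets its own diagnostic,
                // but remember that at least one error was emitted.
                res = Err(Diag::new(format!("duplicate definitions for `{item}`")).emit());
            }
        }
        res
    }

    fn main() {
        assert_eq!(check_no_duplicates(&["a", "b"]), Ok(()));
        assert!(check_no_duplicates(&["a", "a", "b", "b"]).is_err());
    }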
@@ -348,7 +348,7 @@ impl<'tcx> ItemCtxt<'tcx> {
ItemCtxt { tcx, item_def_id, tainted_by_errors: Cell::new(None) }
}

pub fn to_ty(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
pub fn to_ty(&self, ast_ty: &hir::Ty<'tcx>) -> Ty<'tcx> {
self.astconv().ast_ty_to_ty(ast_ty)
}

@@ -412,7 +412,7 @@ impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
&self,
span: Span,
item_def_id: DefId,
item_segment: &hir::PathSegment<'_>,
item_segment: &hir::PathSegment<'tcx>,
poly_trait_ref: ty::PolyTraitRef<'tcx>,
) -> Ty<'tcx> {
if let Some(trait_ref) = poly_trait_ref.no_bound_vars() {

@@ -1148,7 +1148,7 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<ty::PolyFnSig<

fn infer_return_ty_for_fn_sig<'tcx>(
tcx: TyCtxt<'tcx>,
sig: &hir::FnSig<'_>,
sig: &hir::FnSig<'tcx>,
generics: &hir::Generics<'_>,
def_id: LocalDefId,
icx: &ItemCtxt<'tcx>,

@@ -1352,14 +1352,14 @@ fn impl_trait_ref(
let last_arg = args.args.len() - 1;
assert!(matches!(args.args[last_arg], hir::GenericArg::Const(anon_const) if anon_const.is_desugared_from_effects));
args.args = &args.args[..args.args.len() - 1];
path_segments[last_segment].args = Some(&args);
path_segments[last_segment].args = Some(tcx.hir_arena.alloc(args));
let path = hir::Path {
span: ast_trait_ref.path.span,
res: ast_trait_ref.path.res,
segments: &path_segments,
segments: tcx.hir_arena.alloc_slice(&path_segments),
};
let trait_ref = hir::TraitRef { path: &path, hir_ref_id: ast_trait_ref.hir_ref_id };
icx.astconv().instantiate_mono_trait_ref(&trait_ref, selfty)
let trait_ref = tcx.hir_arena.alloc(hir::TraitRef { path: tcx.hir_arena.alloc(path), hir_ref_id: ast_trait_ref.hir_ref_id });
icx.astconv().instantiate_mono_trait_ref(trait_ref, selfty)
} else {
icx.astconv().instantiate_mono_trait_ref(ast_trait_ref, selfty)
}
@@ -5,20 +5,22 @@ use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{self as hir, def, Expr, ImplItem, Item, Node, TraitItem};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::{sym, DUMMY_SP};
use rustc_span::{sym, ErrorGuaranteed, DUMMY_SP};

use crate::errors::{TaitForwardCompat, TypeOf, UnconstrainedOpaqueType};

pub fn test_opaque_hidden_types(tcx: TyCtxt<'_>) {
pub fn test_opaque_hidden_types(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
let mut res = Ok(());
if tcx.has_attr(CRATE_DEF_ID, sym::rustc_hidden_type_of_opaques) {
for id in tcx.hir().items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::OpaqueTy) {
let type_of = tcx.type_of(id.owner_id).instantiate_identity();

tcx.dcx().emit_err(TypeOf { span: tcx.def_span(id.owner_id), type_of });
res = Err(tcx.dcx().emit_err(TypeOf { span: tcx.def_span(id.owner_id), type_of }));
}
}
}
res
}

/// Checks "defining uses" of opaque `impl Trait` types to ensure that they meet the restrictions

@@ -135,18 +137,25 @@ impl TaitConstraintLocator<'_> {
return;
}

if let Some(hir_sig) = self.tcx.hir_node_by_def_id(item_def_id).fn_decl() {
if hir_sig.output.get_infer_ret_ty().is_some() {
let guar = self.tcx.dcx().span_delayed_bug(
hir_sig.output.span(),
"inferring return types and opaque types do not mix well",
);
self.found = Some(ty::OpaqueHiddenType {
span: DUMMY_SP,
ty: Ty::new_error(self.tcx, guar),
});
return;
}
// Function items with `_` in their return type already emit an error, skip any
// "non-defining use" errors for them.
// Note that we use `Node::fn_sig` instead of `Node::fn_decl` here, because the former
// excludes closures, which are allowed to have `_` in their return type.
let hir_node = self.tcx.hir_node_by_def_id(item_def_id);
debug_assert!(
!matches!(hir_node, Node::ForeignItem(..)),
"foreign items cannot constrain opaque types",
);
if let Some(hir_sig) = hir_node.fn_sig()
&& hir_sig.decl.output.get_infer_ret_ty().is_some()
{
let guar = self.tcx.dcx().span_delayed_bug(
hir_sig.decl.output.span(),
"inferring return types and opaque types do not mix well",
);
self.found =
Some(ty::OpaqueHiddenType { span: DUMMY_SP, ty: Ty::new_error(self.tcx, guar) });
return;
}

// Calling `mir_borrowck` can lead to cycle errors through
@@ -17,7 +17,7 @@ use rustc_hir::def::DefKind;
use rustc_hir::def_id::{LocalDefId, LocalModDefId};
use rustc_middle::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt};
use rustc_span::{Span, Symbol};
use rustc_span::{ErrorGuaranteed, Span, Symbol};

mod min_specialization;

@@ -51,24 +51,29 @@ mod min_specialization;
/// impl<'a> Trait<Foo> for Bar { type X = &'a i32; }
/// // ^ 'a is unused and appears in assoc type, error
/// ```
fn check_mod_impl_wf(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
fn check_mod_impl_wf(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) -> Result<(), ErrorGuaranteed> {
let min_specialization = tcx.features().min_specialization;
let module = tcx.hir_module_items(module_def_id);
let mut res = Ok(());
for id in module.items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::Impl { .. }) {
enforce_impl_params_are_constrained(tcx, id.owner_id.def_id);
res = res.and(enforce_impl_params_are_constrained(tcx, id.owner_id.def_id));
if min_specialization {
check_min_specialization(tcx, id.owner_id.def_id);
res = res.and(check_min_specialization(tcx, id.owner_id.def_id));
}
}
}
res
}

pub fn provide(providers: &mut Providers) {
*providers = Providers { check_mod_impl_wf, ..*providers };
}

fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId) {
fn enforce_impl_params_are_constrained(
tcx: TyCtxt<'_>,
impl_def_id: LocalDefId,
) -> Result<(), ErrorGuaranteed> {
// Every lifetime used in an associated type must be constrained.
let impl_self_ty = tcx.type_of(impl_def_id).instantiate_identity();
if impl_self_ty.references_error() {

@@ -80,7 +85,10 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
"potentially unconstrained type parameters weren't evaluated: {impl_self_ty:?}",
),
);
return;
// This is super fishy, but our current `rustc_hir_analysis::check_crate` pipeline depends on
// `type_of` having been called much earlier, and thus this value being read from cache.
// Compilation must continue in order for other important diagnostics to keep showing up.
return Ok(());
}
let impl_generics = tcx.generics_of(impl_def_id);
let impl_predicates = tcx.predicates_of(impl_def_id);

@@ -113,13 +121,19 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
})
.collect();

let mut res = Ok(());
for param in &impl_generics.params {
match param.kind {
// Disallow ANY unconstrained type parameters.
ty::GenericParamDefKind::Type { .. } => {
let param_ty = ty::ParamTy::for_def(param);
if !input_parameters.contains(&cgp::Parameter::from(param_ty)) {
report_unused_parameter(tcx, tcx.def_span(param.def_id), "type", param_ty.name);
res = Err(report_unused_parameter(
tcx,
tcx.def_span(param.def_id),
"type",
param_ty.name,
));
}
}
ty::GenericParamDefKind::Lifetime => {

@@ -127,27 +141,28 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
if lifetimes_in_associated_types.contains(&param_lt) && // (*)
!input_parameters.contains(&param_lt)
{
report_unused_parameter(
res = Err(report_unused_parameter(
tcx,
tcx.def_span(param.def_id),
"lifetime",
param.name,
);
));
}
}
ty::GenericParamDefKind::Const { .. } => {
let param_ct = ty::ParamConst::for_def(param);
if !input_parameters.contains(&cgp::Parameter::from(param_ct)) {
report_unused_parameter(
res = Err(report_unused_parameter(
tcx,
tcx.def_span(param.def_id),
"const",
param_ct.name,
);
));
}
}
}
}
res

// (*) This is a horrible concession to reality. I think it'd be
// better to just ban unconstrained lifetimes outright, but in

@@ -169,7 +184,12 @@ fn enforce_impl_params_are_constrained(tcx: TyCtxt<'_>, impl_def_id: LocalDefId)
// used elsewhere are not projected back out.
}

fn report_unused_parameter(tcx: TyCtxt<'_>, span: Span, kind: &str, name: Symbol) {
fn report_unused_parameter(
tcx: TyCtxt<'_>,
span: Span,
kind: &str,
name: Symbol,
) -> ErrorGuaranteed {
let mut err = struct_span_code_err!(
tcx.dcx(),
span,

@@ -188,5 +208,5 @@ fn report_unused_parameter(tcx: TyCtxt<'_>, span: Span, kind: &str, name: Symbol
"proving the result of expressions other than the parameter are unique is not supported",
);
}
err.emit();
err.emit()
}
@@ -82,10 +82,14 @@ use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt;
use rustc_trait_selection::traits::outlives_bounds::InferCtxtExt as _;
use rustc_trait_selection::traits::{self, translate_args_with_cause, wf, ObligationCtxt};

pub(super) fn check_min_specialization(tcx: TyCtxt<'_>, impl_def_id: LocalDefId) {
pub(super) fn check_min_specialization(
tcx: TyCtxt<'_>,
impl_def_id: LocalDefId,
) -> Result<(), ErrorGuaranteed> {
if let Some(node) = parent_specialization_node(tcx, impl_def_id) {
check_always_applicable(tcx, impl_def_id, node);
check_always_applicable(tcx, impl_def_id, node)?;
}
Ok(())
}

fn parent_specialization_node(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId) -> Option<Node> {

@@ -109,42 +113,58 @@ fn parent_specialization_node(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId) -> Opti

/// Check that `impl1` is a sound specialization
#[instrument(level = "debug", skip(tcx))]
fn check_always_applicable(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node) {
fn check_always_applicable(
tcx: TyCtxt<'_>,
impl1_def_id: LocalDefId,
impl2_node: Node,
) -> Result<(), ErrorGuaranteed> {
let span = tcx.def_span(impl1_def_id);
check_has_items(tcx, impl1_def_id, impl2_node, span);
let mut res = check_has_items(tcx, impl1_def_id, impl2_node, span);

if let Ok((impl1_args, impl2_args)) = get_impl_args(tcx, impl1_def_id, impl2_node) {
let impl2_def_id = impl2_node.def_id();
debug!(?impl2_def_id, ?impl2_args);
let (impl1_args, impl2_args) = get_impl_args(tcx, impl1_def_id, impl2_node)?;
let impl2_def_id = impl2_node.def_id();
debug!(?impl2_def_id, ?impl2_args);

let parent_args = if impl2_node.is_from_trait() {
impl2_args.to_vec()
} else {
unconstrained_parent_impl_args(tcx, impl2_def_id, impl2_args)
};
let parent_args = if impl2_node.is_from_trait() {
impl2_args.to_vec()
} else {
unconstrained_parent_impl_args(tcx, impl2_def_id, impl2_args)
};

check_constness(tcx, impl1_def_id, impl2_node, span);
check_static_lifetimes(tcx, &parent_args, span);
check_duplicate_params(tcx, impl1_args, &parent_args, span);
check_predicates(tcx, impl1_def_id, impl1_args, impl2_node, impl2_args, span);
}
res = res.and(check_constness(tcx, impl1_def_id, impl2_node, span));
res = res.and(check_static_lifetimes(tcx, &parent_args, span));
res = res.and(check_duplicate_params(tcx, impl1_args, &parent_args, span));
res = res.and(check_predicates(tcx, impl1_def_id, impl1_args, impl2_node, impl2_args, span));

res
}

fn check_has_items(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node, span: Span) {
fn check_has_items(
tcx: TyCtxt<'_>,
impl1_def_id: LocalDefId,
impl2_node: Node,
span: Span,
) -> Result<(), ErrorGuaranteed> {
if let Node::Impl(impl2_id) = impl2_node
&& tcx.associated_item_def_ids(impl1_def_id).is_empty()
{
let base_impl_span = tcx.def_span(impl2_id);
tcx.dcx().emit_err(errors::EmptySpecialization { span, base_impl_span });
return Err(tcx.dcx().emit_err(errors::EmptySpecialization { span, base_impl_span }));
}
Ok(())
}

/// Check that the specializing impl `impl1` is at least as const as the base
/// impl `impl2`
fn check_constness(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node, span: Span) {
fn check_constness(
tcx: TyCtxt<'_>,
impl1_def_id: LocalDefId,
impl2_node: Node,
span: Span,
) -> Result<(), ErrorGuaranteed> {
if impl2_node.is_from_trait() {
// This isn't a specialization
return;
return Ok(());
}

let impl1_constness = tcx.constness(impl1_def_id.to_def_id());

@@ -152,9 +172,10 @@ fn check_constness(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node,

if let hir::Constness::Const = impl2_constness {
if let hir::Constness::NotConst = impl1_constness {
tcx.dcx().emit_err(errors::ConstSpecialize { span });
return Err(tcx.dcx().emit_err(errors::ConstSpecialize { span }));
}
}
Ok(())
}

/// Given a specializing impl `impl1`, and the base impl `impl2`, returns two

@@ -202,7 +223,7 @@ fn get_impl_args(
return Err(guar);
}

let implied_bounds = infcx.implied_bounds_tys(param_env, impl1_def_id, assumed_wf_types);
let implied_bounds = infcx.implied_bounds_tys(param_env, impl1_def_id, &assumed_wf_types);
let outlives_env = OutlivesEnvironment::with_bounds(param_env, implied_bounds);
let _ = ocx.resolve_regions_and_report_errors(impl1_def_id, &outlives_env);
let Ok(impl2_args) = infcx.fully_resolve(impl2_args) else {

@@ -290,15 +311,17 @@ fn check_duplicate_params<'tcx>(
impl1_args: GenericArgsRef<'tcx>,
parent_args: &Vec<GenericArg<'tcx>>,
span: Span,
) {
) -> Result<(), ErrorGuaranteed> {
let mut base_params = cgp::parameters_for(parent_args, true);
base_params.sort_by_key(|param| param.0);
if let (_, [duplicate, ..]) = base_params.partition_dedup() {
let param = impl1_args[duplicate.0 as usize];
tcx.dcx()
return Err(tcx
.dcx()
.struct_span_err(span, format!("specializing impl repeats parameter `{param}`"))
.emit();
.emit());
}
Ok(())
}

/// Check that `'static` lifetimes are not introduced by the specializing impl.

@@ -313,10 +336,11 @@ fn check_static_lifetimes<'tcx>(
tcx: TyCtxt<'tcx>,
parent_args: &Vec<GenericArg<'tcx>>,
span: Span,
) {
) -> Result<(), ErrorGuaranteed> {
if tcx.any_free_region_meets(parent_args, |r| r.is_static()) {
tcx.dcx().emit_err(errors::StaticSpecialize { span });
return Err(tcx.dcx().emit_err(errors::StaticSpecialize { span }));
}
Ok(())
}

/// Check whether predicates on the specializing impl (`impl1`) are allowed.

@@ -337,7 +361,7 @@ fn check_predicates<'tcx>(
impl2_node: Node,
impl2_args: GenericArgsRef<'tcx>,
span: Span,
) {
) -> Result<(), ErrorGuaranteed> {
let impl1_predicates: Vec<_> = traits::elaborate(
tcx,
tcx.predicates_of(impl1_def_id).instantiate(tcx, impl1_args).into_iter(),

@@ -399,14 +423,16 @@ fn check_predicates<'tcx>(
}
impl2_predicates.extend(traits::elaborate(tcx, always_applicable_traits));

let mut res = Ok(());
for (clause, span) in impl1_predicates {
if !impl2_predicates
.iter()
.any(|pred2| trait_predicates_eq(tcx, clause.as_predicate(), *pred2, span))
{
check_specialization_on(tcx, clause, span)
res = res.and(check_specialization_on(tcx, clause, span))
}
}
res
}

/// Checks if some predicate on the specializing impl (`predicate1`) is the same

@@ -443,19 +469,26 @@ fn trait_predicates_eq<'tcx>(
}

#[instrument(level = "debug", skip(tcx))]
fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, clause: ty::Clause<'tcx>, span: Span) {
fn check_specialization_on<'tcx>(
tcx: TyCtxt<'tcx>,
clause: ty::Clause<'tcx>,
span: Span,
) -> Result<(), ErrorGuaranteed> {
match clause.kind().skip_binder() {
// Global predicates are either always true or always false, so we
// are fine to specialize on.
_ if clause.is_global() => (),
_ if clause.is_global() => Ok(()),
// We allow specializing on explicitly marked traits with no associated
// items.
ty::ClauseKind::Trait(ty::TraitPredicate { trait_ref, polarity: _ }) => {
if !matches!(
if matches!(
trait_specialization_kind(tcx, clause),
Some(TraitSpecializationKind::Marker)
) {
tcx.dcx()
Ok(())
} else {
Err(tcx
.dcx()
.struct_span_err(
span,
format!(

@@ -463,17 +496,16 @@ fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, clause: ty::Clause<'tcx>, sp
tcx.def_path_str(trait_ref.def_id),
),
)
.emit();
.emit())
}
}
ty::ClauseKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => {
tcx.dcx()
.struct_span_err(
span,
format!("cannot specialize on associated type `{projection_ty} == {term}`",),
)
.emit();
}
ty::ClauseKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => Err(tcx
.dcx()
.struct_span_err(
span,
format!("cannot specialize on associated type `{projection_ty} == {term}`",),
)
.emit()),
ty::ClauseKind::ConstArgHasType(..) => {
// FIXME(min_specialization), FIXME(const_generics):
// It probably isn't right to allow _every_ `ConstArgHasType` but I am somewhat unsure

@@ -483,12 +515,12 @@ fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, clause: ty::Clause<'tcx>, sp
// While we do not support constructs like `<T, const N: T>` there is probably no risk of
// soundness bugs, but when we support generic const parameter types this will need to be
// revisited.
Ok(())
}
_ => {
tcx.dcx()
.struct_span_err(span, format!("cannot specialize on predicate `{clause}`"))
.emit();
}
_ => Err(tcx
.dcx()
.struct_span_err(span, format!("cannot specialize on predicate `{clause}`"))
.emit()),
}
}
@@ -166,33 +166,29 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
tcx.hir().for_each_module(|module| tcx.ensure().collect_mod_item_types(module))
});

// FIXME(matthewjasper) We shouldn't need to use `track_errors` anywhere in this function
// or the compiler in general.
if tcx.features().rustc_attrs {
tcx.sess.track_errors(|| {
tcx.sess.time("outlives_testing", || outlives::test::test_inferred_outlives(tcx));
})?;
tcx.sess.time("outlives_testing", || outlives::test::test_inferred_outlives(tcx))?;
}

tcx.sess.track_errors(|| {
tcx.sess.time("coherence_checking", || {
// Check impls constrain their parameters
tcx.hir().for_each_module(|module| tcx.ensure().check_mod_impl_wf(module));
tcx.sess.time("coherence_checking", || {
// Check impls constrain their parameters
let res =
tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_impl_wf(module));

// FIXME(matthewjasper) We shouldn't need to use `track_errors` anywhere in this function
// or the compiler in general.
res.and(tcx.sess.track_errors(|| {
for &trait_def_id in tcx.all_local_trait_impls(()).keys() {
tcx.ensure().coherent_trait(trait_def_id);
}

// these queries are executed for side-effects (error reporting):
tcx.ensure().crate_inherent_impls(());
tcx.ensure().crate_inherent_impls_overlap_check(());
});
}))
// these queries are executed for side-effects (error reporting):
.and(tcx.ensure().crate_inherent_impls(()))
.and(tcx.ensure().crate_inherent_impls_overlap_check(()))
})?;

if tcx.features().rustc_attrs {
tcx.sess.track_errors(|| {
tcx.sess.time("variance_testing", || variance::test::test_variance(tcx));
})?;
tcx.sess.time("variance_testing", || variance::test::test_variance(tcx))?;
}

tcx.sess.time("wf_checking", || {

@@ -200,7 +196,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
})?;

if tcx.features().rustc_attrs {
tcx.sess.track_errors(|| collect::test_opaque_hidden_types(tcx))?;
collect::test_opaque_hidden_types(tcx)?;
}

// Freeze definitions as we don't add new ones at this point. This improves performance by

@@ -225,7 +221,7 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {

/// A quasi-deprecated helper used in rustdoc and clippy to get
/// the type from a HIR node.
pub fn hir_ty_to_ty<'tcx>(tcx: TyCtxt<'tcx>, hir_ty: &hir::Ty<'_>) -> Ty<'tcx> {
pub fn hir_ty_to_ty<'tcx>(tcx: TyCtxt<'tcx>, hir_ty: &hir::Ty<'tcx>) -> Ty<'tcx> {
// In case there are any projections, etc., find the "environment"
// def-ID that will be used to determine the traits/predicates in
// scope. This is derived from the enclosing item-like thing.

@@ -236,7 +232,7 @@ pub fn hir_ty_to_ty<'tcx>(tcx: TyCtxt<'tcx>, hir_ty: &hir::Ty<'_>) -> Ty<'tcx> {

pub fn hir_trait_to_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
hir_trait: &hir::TraitRef<'_>,
hir_trait: &hir::TraitRef<'tcx>,
self_ty: Ty<'tcx>,
) -> Bounds<'tcx> {
// In case there are any projections, etc., find the "environment"
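The check_crate hunk above drops the track_errors wrappers and combines the Result<(), ErrorGuaranteed> values directly: .and(...) is used where later passes should still run after a failure, and ? where they should not. A small, self-contained sketch of the difference between the two, using a stand-in Reported token rather than rustc's types:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Reported;

    fn pass(ok: bool, log: &mut Vec<&'static str>, name: &'static str) -> Result<(), Reported> {
        log.push(name);
        if ok { Ok(()) } else { Err(Reported) }
    }

    // `?` short-circuits: once a pass fails, the remaining passes are skipped.
    fn with_question_mark(log: &mut Vec<&'static str>) -> Result<(), Reported> {
        pass(false, log, "coherence")?;
        pass(true, log, "wf")?;
        Ok(())
    }

    // `.and(...)` keeps going: every pass runs and the first error is preserved.
    fn with_and(log: &mut Vec<&'static str>) -> Result<(), Reported> {
        let mut res = Ok(());
        res = res.and(pass(false, log, "coherence"));
        res = res.and(pass(true, log, "wf"));
        res
    }

    fn main() {
        let mut a = Vec::new();
        assert_eq!(with_question_mark(&mut a), Err(Reported));
        assert_eq!(a, ["coherence"]); // "wf" never ran

        let mut b = Vec::new();
        assert_eq!(with_and(&mut b), Err(Reported));
        assert_eq!(b, ["coherence", "wf"]); // both ran, failure still reported
    }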
@@ -1,7 +1,8 @@
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::symbol::sym;
use rustc_span::{symbol::sym, ErrorGuaranteed};

pub fn test_inferred_outlives(tcx: TyCtxt<'_>) {
pub fn test_inferred_outlives(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
let mut res = Ok(());
for id in tcx.hir().items() {
// For unit testing: check for a special "rustc_outlives"
// attribute and report an error with various results if found.

@@ -22,7 +23,8 @@ pub fn test_inferred_outlives(tcx: TyCtxt<'_>) {
for p in pred {
err.note(p);
}
err.emit();
res = Err(err.emit());
}
}
res
}
@@ -6,13 +6,13 @@ pub use self::{
missing_cast_for_variadic_arg::*, sized_unsized_cast::*, wrong_number_of_generic_args::*,
};

use rustc_errors::{DiagnosticBuilder, DiagnosticId};
use rustc_errors::DiagnosticBuilder;
use rustc_session::Session;

pub trait StructuredDiagnostic<'tcx> {
fn session(&self) -> &Session;

fn code(&self) -> DiagnosticId;
fn code(&self) -> String;

fn diagnostic(&self) -> DiagnosticBuilder<'tcx> {
let err = self.diagnostic_common();

@@ -1,5 +1,5 @@
use crate::{errors, structured_errors::StructuredDiagnostic};
use rustc_errors::{DiagnosticBuilder, DiagnosticId};
use rustc_errors::DiagnosticBuilder;
use rustc_middle::ty::{Ty, TypeVisitableExt};
use rustc_session::Session;
use rustc_span::Span;

@@ -16,7 +16,7 @@ impl<'tcx> StructuredDiagnostic<'tcx> for MissingCastForVariadicArg<'tcx, '_> {
self.sess
}

fn code(&self) -> DiagnosticId {
fn code(&self) -> String {
rustc_errors::error_code!(E0617)
}

@@ -1,5 +1,5 @@
use crate::{errors, structured_errors::StructuredDiagnostic};
use rustc_errors::{DiagnosticBuilder, DiagnosticId};
use rustc_errors::DiagnosticBuilder;
use rustc_middle::ty::{Ty, TypeVisitableExt};
use rustc_session::Session;
use rustc_span::Span;

@@ -16,7 +16,7 @@ impl<'tcx> StructuredDiagnostic<'tcx> for SizedUnsizedCast<'tcx> {
self.sess
}

fn code(&self) -> DiagnosticId {
fn code(&self) -> String {
rustc_errors::error_code!(E0607)
}

@@ -1,7 +1,5 @@
use crate::structured_errors::StructuredDiagnostic;
use rustc_errors::{
pluralize, Applicability, Diagnostic, DiagnosticBuilder, DiagnosticId, MultiSpan,
};
use rustc_errors::{pluralize, Applicability, Diagnostic, DiagnosticBuilder, MultiSpan};
use rustc_hir as hir;
use rustc_middle::ty::{self as ty, AssocItems, AssocKind, TyCtxt};
use rustc_session::Session;

@@ -1107,7 +1105,7 @@ impl<'tcx> StructuredDiagnostic<'tcx> for WrongNumberOfGenericArgs<'_, 'tcx> {
self.tcx.sess
}

fn code(&self) -> DiagnosticId {
fn code(&self) -> String {
rustc_errors::error_code!(E0107)
}
@@ -3,6 +3,7 @@
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/variance.html

use itertools::Itertools;
use rustc_arena::DroplessArena;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, LocalDefId};

@@ -91,7 +92,7 @@ fn variance_of_opaque(tcx: TyCtxt<'_>, item_def_id: LocalDefId) -> &[ty::Varianc
fn visit_opaque(&mut self, def_id: DefId, args: GenericArgsRef<'tcx>) -> ControlFlow<!> {
if def_id != self.root_def_id && self.tcx.is_descendant_of(def_id, self.root_def_id) {
let child_variances = self.tcx.variances_of(def_id);
for (a, v) in args.iter().zip(child_variances) {
for (a, v) in args.iter().zip_eq(child_variances) {
if *v != ty::Bivariant {
a.visit_with(self)?;
}
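The variance_of_opaque hunk above swaps Iterator::zip for Itertools::zip_eq when pairing generic args with the child opaque's variances: zip silently stops at the shorter iterator, while zip_eq panics on a length mismatch, so a desynchronization shows up loudly instead of being masked. A small sketch of that behaviour, assuming the itertools crate is available as a dependency:

    use itertools::Itertools;

    fn main() {
        let args = ["T", "U", "V"];
        let variances = ["covariant", "invariant"];

        // std `zip`: the extra element is silently dropped.
        assert_eq!(args.iter().zip(variances.iter()).count(), 2);

        // `zip_eq` builds lazily; consuming it past the shorter input panics at run time
        // instead of truncating, e.g. `args.iter().zip_eq(variances.iter()).count()`.
        let _lazy = args.iter().zip_eq(variances.iter());
    }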
@@ -2,19 +2,21 @@ use rustc_hir::def::DefKind;
use rustc_hir::def_id::CRATE_DEF_ID;
use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::sym;
use rustc_span::ErrorGuaranteed;

use crate::errors;

pub fn test_variance(tcx: TyCtxt<'_>) {
pub fn test_variance(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> {
let mut res = Ok(());
if tcx.has_attr(CRATE_DEF_ID, sym::rustc_variance_of_opaques) {
for id in tcx.hir().items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::OpaqueTy) {
let variances_of = tcx.variances_of(id.owner_id);

tcx.dcx().emit_err(errors::VariancesOf {
res = Err(tcx.dcx().emit_err(errors::VariancesOf {
span: tcx.def_span(id.owner_id),
variances_of: format!("{variances_of:?}"),
});
}));
}
}
}

@@ -25,10 +27,11 @@ pub fn test_variance(tcx: TyCtxt<'_>) {
if tcx.has_attr(id.owner_id, sym::rustc_variance) {
let variances_of = tcx.variances_of(id.owner_id);

tcx.dcx().emit_err(errors::VariancesOf {
res = Err(tcx.dcx().emit_err(errors::VariancesOf {
span: tcx.def_span(id.owner_id),
variances_of: format!("{variances_of:?}"),
});
}));
}
}
res
}

@@ -1838,6 +1838,11 @@ impl<'a> State<'a> {
self.commasep(Inconsistent, after, |s, p| s.print_pat(p));
self.word("]");
}
PatKind::Err(_) => {
self.popen();
self.word("/*ERROR*/");
self.pclose();
}
}
self.ann.post(self, AnnNode::Pat(pat))
}
Some files were not shown because too many files have changed in this diff.