Auto merge of #136225 - fmease:rollup-fm7m744, r=fmease
Rollup of 7 pull requests

Successful merges:

- #135625 ([cfg_match] Document the use of expressions.)
- #135902 (Do not consider child bound assumptions for rigid alias)
- #135943 (Rename `Piece::String` to `Piece::Lit`)
- #136104 (Add mermaid graphs of NLL regions and SCCs to polonius MIR dump)
- #136143 (Update books)
- #136147 (ABI-required target features: warn when they are missing in base CPU)
- #136164 (Refactor FnKind variant to hold &Fn)

r? `@ghost`
`@rustbot` modify labels: rollup
commit
ccc9ba5c30
66 changed files with 580 additions and 412 deletions
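Most of the churn in this diff comes from #136164, which collapses the separate signature, generics, defaultness and body fields of `FnKind::Fn` into a single `&Fn` payload. The following is a minimal sketch of what a visitor match arm looks like against the new shape; the free-standing `describe_fn` helper is hypothetical and only for illustration, assuming the post-refactor `rustc_ast` API:

    use rustc_ast::visit::{FnCtxt, FnKind};

    // Hypothetical helper: the signature, generics and body are now reached
    // through the single `&ast::Fn` payload instead of separate tuple fields.
    fn describe_fn(kind: FnKind<'_>) {
        match kind {
            // New shape: FnKind::Fn(FnCtxt, &Ident, &Visibility, &ast::Fn)
            FnKind::Fn(FnCtxt::Free, ident, _vis, func) => {
                println!("free fn `{}` with {} params", ident, func.sig.decl.inputs.len());
            }
            FnKind::Fn(..) => { /* associated or foreign fns */ }
            FnKind::Closure(..) => { /* unchanged by this PR */ }
        }
    }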
@@ -954,8 +954,14 @@ fn walk_coroutine_kind<T: MutVisitor>(vis: &mut T, coroutine_kind: &mut Coroutin
 fn walk_fn<T: MutVisitor>(vis: &mut T, kind: FnKind<'_>) {
     match kind {
-        FnKind::Fn(_ctxt, _ident, FnSig { header, decl, span }, _visibility, generics, body) => {
+        FnKind::Fn(
+            _ctxt,
+            _ident,
+            _vis,
+            Fn { defaultness, generics, body, sig: FnSig { header, decl, span } },
+        ) => {
             // Identifier and visibility are visited as a part of the item.
+            visit_defaultness(vis, defaultness);
             vis.visit_fn_header(header);
             vis.visit_generics(generics);
             vis.visit_fn_decl(decl);
@@ -1205,13 +1211,8 @@ impl WalkItemKind for ItemKind {
             ItemKind::Const(item) => {
                 visit_const_item(item, vis);
             }
-            ItemKind::Fn(box Fn { defaultness, generics, sig, body }) => {
-                visit_defaultness(vis, defaultness);
-                vis.visit_fn(
-                    FnKind::Fn(FnCtxt::Free, ident, sig, visibility, generics, body),
-                    span,
-                    id,
-                );
+            ItemKind::Fn(func) => {
+                vis.visit_fn(FnKind::Fn(FnCtxt::Free, ident, visibility, &mut *func), span, id);
             }
             ItemKind::Mod(safety, mod_kind) => {
                 visit_safety(vis, safety);
@@ -1329,10 +1330,9 @@ impl WalkItemKind for AssocItemKind {
             AssocItemKind::Const(item) => {
                 visit_const_item(item, visitor);
             }
-            AssocItemKind::Fn(box Fn { defaultness, generics, sig, body }) => {
-                visit_defaultness(visitor, defaultness);
+            AssocItemKind::Fn(func) => {
                 visitor.visit_fn(
-                    FnKind::Fn(FnCtxt::Assoc(ctxt), ident, sig, visibility, generics, body),
+                    FnKind::Fn(FnCtxt::Assoc(ctxt), ident, visibility, &mut *func),
                     span,
                     id,
                 );
@@ -1476,10 +1476,9 @@ impl WalkItemKind for ForeignItemKind {
                 visitor.visit_ty(ty);
                 visit_opt(expr, |expr| visitor.visit_expr(expr));
             }
-            ForeignItemKind::Fn(box Fn { defaultness, generics, sig, body }) => {
-                visit_defaultness(visitor, defaultness);
+            ForeignItemKind::Fn(func) => {
                 visitor.visit_fn(
-                    FnKind::Fn(FnCtxt::Foreign, ident, sig, visibility, generics, body),
+                    FnKind::Fn(FnCtxt::Foreign, ident, visibility, &mut *func),
                     span,
                     id,
                 );
@@ -1965,14 +1964,7 @@ impl<N: DummyAstNode, T: DummyAstNode> DummyAstNode for crate::ast_traits::AstNo
 #[derive(Debug)]
 pub enum FnKind<'a> {
     /// E.g., `fn foo()`, `fn foo(&self)`, or `extern "Abi" fn foo()`.
-    Fn(
-        FnCtxt,
-        &'a mut Ident,
-        &'a mut FnSig,
-        &'a mut Visibility,
-        &'a mut Generics,
-        &'a mut Option<P<Block>>,
-    ),
+    Fn(FnCtxt, &'a mut Ident, &'a mut Visibility, &'a mut Fn),

     /// E.g., `|x, y| body`.
     Closure(
@@ -65,7 +65,7 @@ impl BoundKind {
 #[derive(Copy, Clone, Debug)]
 pub enum FnKind<'a> {
     /// E.g., `fn foo()`, `fn foo(&self)`, or `extern "Abi" fn foo()`.
-    Fn(FnCtxt, &'a Ident, &'a FnSig, &'a Visibility, &'a Generics, &'a Option<P<Block>>),
+    Fn(FnCtxt, &'a Ident, &'a Visibility, &'a Fn),

     /// E.g., `|x, y| body`.
     Closure(&'a ClosureBinder, &'a Option<CoroutineKind>, &'a FnDecl, &'a Expr),
@@ -74,7 +74,7 @@ pub enum FnKind<'a> {
 impl<'a> FnKind<'a> {
     pub fn header(&self) -> Option<&'a FnHeader> {
         match *self {
-            FnKind::Fn(_, _, sig, _, _, _) => Some(&sig.header),
+            FnKind::Fn(_, _, _, Fn { sig, .. }) => Some(&sig.header),
             FnKind::Closure(..) => None,
         }
     }
@@ -88,7 +88,7 @@ impl<'a> FnKind<'a> {

     pub fn decl(&self) -> &'a FnDecl {
         match self {
-            FnKind::Fn(_, _, sig, _, _, _) => &sig.decl,
+            FnKind::Fn(_, _, _, Fn { sig, .. }) => &sig.decl,
             FnKind::Closure(_, _, decl, _) => decl,
         }
     }
@@ -374,8 +374,8 @@ impl WalkItemKind for ItemKind {
                 try_visit!(visitor.visit_ty(ty));
                 visit_opt!(visitor, visit_expr, expr);
             }
-            ItemKind::Fn(box Fn { defaultness: _, generics, sig, body }) => {
-                let kind = FnKind::Fn(FnCtxt::Free, ident, sig, vis, generics, body);
+            ItemKind::Fn(func) => {
+                let kind = FnKind::Fn(FnCtxt::Free, ident, vis, &*func);
                 try_visit!(visitor.visit_fn(kind, span, id));
             }
             ItemKind::Mod(_unsafety, mod_kind) => match mod_kind {
@@ -715,8 +715,8 @@ impl WalkItemKind for ForeignItemKind {
                 try_visit!(visitor.visit_ty(ty));
                 visit_opt!(visitor, visit_expr, expr);
             }
-            ForeignItemKind::Fn(box Fn { defaultness: _, generics, sig, body }) => {
-                let kind = FnKind::Fn(FnCtxt::Foreign, ident, sig, vis, generics, body);
+            ForeignItemKind::Fn(func) => {
+                let kind = FnKind::Fn(FnCtxt::Foreign, ident, vis, &*func);
                 try_visit!(visitor.visit_fn(kind, span, id));
             }
             ForeignItemKind::TyAlias(box TyAlias {
@@ -858,7 +858,12 @@ pub fn walk_fn_decl<'a, V: Visitor<'a>>(

 pub fn walk_fn<'a, V: Visitor<'a>>(visitor: &mut V, kind: FnKind<'a>) -> V::Result {
     match kind {
-        FnKind::Fn(_ctxt, _ident, FnSig { header, decl, span: _ }, _vis, generics, body) => {
+        FnKind::Fn(
+            _ctxt,
+            _ident,
+            _vis,
+            Fn { defaultness: _, sig: FnSig { header, decl, span: _ }, generics, body },
+        ) => {
             // Identifier and visibility are visited as a part of the item.
             try_visit!(visitor.visit_fn_header(header));
             try_visit!(visitor.visit_generics(generics));
@@ -892,8 +897,8 @@ impl WalkItemKind for AssocItemKind {
                 try_visit!(visitor.visit_ty(ty));
                 visit_opt!(visitor, visit_expr, expr);
             }
-            AssocItemKind::Fn(box Fn { defaultness: _, generics, sig, body }) => {
-                let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), ident, sig, vis, generics, body);
+            AssocItemKind::Fn(func) => {
+                let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), ident, vis, &*func);
                 try_visit!(visitor.visit_fn(kind, span, id));
             }
             AssocItemKind::Type(box TyAlias {
@@ -2125,7 +2125,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         self.arena.alloc(self.expr_call_mut(span, e, args))
     }

-    fn expr_call_lang_item_fn_mut(
+    pub(super) fn expr_call_lang_item_fn_mut(
         &mut self,
         span: Span,
         lang_item: hir::LangItem,
@@ -2135,7 +2135,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         self.expr_call_mut(span, path, args)
     }

-    fn expr_call_lang_item_fn(
+    pub(super) fn expr_call_lang_item_fn(
         &mut self,
         span: Span,
         lang_item: hir::LangItem,
@@ -917,7 +917,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
                 walk_list!(self, visit_attribute, &item.attrs);
                 return; // Avoid visiting again.
             }
-            ItemKind::Fn(box Fn { defaultness, sig, generics, body }) => {
+            ItemKind::Fn(func @ box Fn { defaultness, generics: _, sig, body }) => {
                 self.check_defaultness(item.span, *defaultness);

                 let is_intrinsic =
@@ -947,7 +947,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {

                 self.visit_vis(&item.vis);
                 self.visit_ident(&item.ident);
-                let kind = FnKind::Fn(FnCtxt::Free, &item.ident, sig, &item.vis, generics, body);
+                let kind = FnKind::Fn(FnCtxt::Free, &item.ident, &item.vis, &*func);
                 self.visit_fn(kind, item.span, item.id);
                 walk_list!(self, visit_attribute, &item.attrs);
                 return; // Avoid visiting again.
@@ -1348,19 +1348,20 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
         }

         if let FnKind::Fn(
-            _,
-            _,
-            FnSig { header: FnHeader { ext: Extern::Implicit(extern_span), .. }, .. },
             _,
             _,
             _,
+            Fn {
+                sig: FnSig { header: FnHeader { ext: Extern::Implicit(extern_span), .. }, .. },
+                ..
+            },
         ) = fk
         {
             self.maybe_lint_missing_abi(*extern_span, id);
         }

         // Functions without bodies cannot have patterns.
-        if let FnKind::Fn(ctxt, _, sig, _, _, None) = fk {
+        if let FnKind::Fn(ctxt, _, _, Fn { body: None, sig, .. }) = fk {
             Self::check_decl_no_pat(&sig.decl, |span, ident, mut_ident| {
                 if mut_ident && matches!(ctxt, FnCtxt::Assoc(_)) {
                     if let Some(ident) = ident {
@@ -1394,7 +1395,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
             .is_some();

         let disallowed = (!tilde_const_allowed).then(|| match fk {
-            FnKind::Fn(_, ident, _, _, _, _) => TildeConstReason::Function { ident: ident.span },
+            FnKind::Fn(_, ident, _, _) => TildeConstReason::Function { ident: ident.span },
             FnKind::Closure(..) => TildeConstReason::Closure,
         });
         self.with_tilde_const(disallowed, |this| visit::walk_fn(this, fk));
@@ -1470,15 +1471,14 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
             self.outer_trait_or_trait_impl.as_ref().and_then(TraitOrTraitImpl::constness).is_some();

         match &item.kind {
-            AssocItemKind::Fn(box Fn { sig, generics, body, .. })
+            AssocItemKind::Fn(func)
                 if parent_is_const
                     || ctxt == AssocCtxt::Trait
-                    || matches!(sig.header.constness, Const::Yes(_)) =>
+                    || matches!(func.sig.header.constness, Const::Yes(_)) =>
             {
                 self.visit_vis(&item.vis);
                 self.visit_ident(&item.ident);
-                let kind =
-                    FnKind::Fn(FnCtxt::Assoc(ctxt), &item.ident, sig, &item.vis, generics, body);
+                let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), &item.ident, &item.vis, &*func);
                 walk_list!(self, visit_attribute, &item.attrs);
                 self.visit_fn(kind, item.span, item.id);
             }
@@ -34,8 +34,8 @@ impl<'a> State<'a> {
         self.maybe_print_comment(span.lo());
         self.print_outer_attributes(attrs);
         match kind {
-            ast::ForeignItemKind::Fn(box ast::Fn { defaultness, sig, generics, body }) => {
-                self.print_fn_full(sig, ident, generics, vis, *defaultness, body.as_deref(), attrs);
+            ast::ForeignItemKind::Fn(func) => {
+                self.print_fn_full(ident, vis, attrs, &*func);
             }
             ast::ForeignItemKind::Static(box ast::StaticItem { ty, mutability, expr, safety }) => {
                 self.print_item_const(
@@ -199,16 +199,8 @@ impl<'a> State<'a> {
                     *defaultness,
                 );
             }
-            ast::ItemKind::Fn(box ast::Fn { defaultness, sig, generics, body }) => {
-                self.print_fn_full(
-                    sig,
-                    item.ident,
-                    generics,
-                    &item.vis,
-                    *defaultness,
-                    body.as_deref(),
-                    &item.attrs,
-                );
+            ast::ItemKind::Fn(func) => {
+                self.print_fn_full(item.ident, &item.vis, &item.attrs, &*func);
             }
             ast::ItemKind::Mod(safety, mod_kind) => {
                 self.head(Self::to_string(|s| {
@@ -542,8 +534,8 @@ impl<'a> State<'a> {
         self.maybe_print_comment(span.lo());
         self.print_outer_attributes(attrs);
         match kind {
-            ast::AssocItemKind::Fn(box ast::Fn { defaultness, sig, generics, body }) => {
-                self.print_fn_full(sig, ident, generics, vis, *defaultness, body.as_deref(), attrs);
+            ast::AssocItemKind::Fn(func) => {
+                self.print_fn_full(ident, vis, attrs, &*func);
             }
             ast::AssocItemKind::Const(box ast::ConstItem { defaultness, generics, ty, expr }) => {
                 self.print_item_const(
@@ -653,19 +645,17 @@ impl<'a> State<'a> {

     fn print_fn_full(
         &mut self,
-        sig: &ast::FnSig,
         name: Ident,
-        generics: &ast::Generics,
         vis: &ast::Visibility,
-        defaultness: ast::Defaultness,
-        body: Option<&ast::Block>,
         attrs: &[ast::Attribute],
+        func: &ast::Fn,
     ) {
+        let ast::Fn { defaultness, generics, sig, body } = func;
         if body.is_some() {
             self.head("");
         }
         self.print_visibility(vis);
-        self.print_defaultness(defaultness);
+        self.print_defaultness(*defaultness);
         self.print_fn(&sig.decl, sig.header, Some(name), generics);
         if let Some(body) = body {
             self.nbsp();
@@ -1,14 +1,18 @@
 use std::io;

+use rustc_data_structures::fx::FxHashSet;
+use rustc_index::IndexVec;
 use rustc_middle::mir::pretty::{
     PassWhere, PrettyPrintMirOptions, create_dump_file, dump_enabled, dump_mir_to_writer,
 };
 use rustc_middle::mir::{Body, ClosureRegionRequirements};
-use rustc_middle::ty::TyCtxt;
+use rustc_middle::ty::{RegionVid, TyCtxt};
 use rustc_session::config::MirIncludeSpans;

 use crate::borrow_set::BorrowSet;
+use crate::constraints::OutlivesConstraint;
 use crate::polonius::{LocalizedOutlivesConstraint, LocalizedOutlivesConstraintSet};
+use crate::type_check::Locations;
 use crate::{BorrowckInferCtxt, RegionInferenceContext};

 /// `-Zdump-mir=polonius` dumps MIR annotated with NLL and polonius specific information.
@@ -50,6 +54,8 @@ pub(crate) fn dump_polonius_mir<'tcx>(
 /// - the NLL MIR
 /// - the list of polonius localized constraints
 /// - a mermaid graph of the CFG
+/// - a mermaid graph of the NLL regions and the constraints between them
+/// - a mermaid graph of the NLL SCCs and the constraints between them
 fn emit_polonius_dump<'tcx>(
     tcx: TyCtxt<'tcx>,
     body: &Body<'tcx>,
@@ -68,7 +74,7 @@ fn emit_polonius_dump<'tcx>(
     // Section 1: the NLL + Polonius MIR.
     writeln!(out, "<div>")?;
     writeln!(out, "Raw MIR dump")?;
-    writeln!(out, "<code><pre>")?;
+    writeln!(out, "<pre><code>")?;
     emit_html_mir(
         tcx,
         body,
@@ -78,15 +84,31 @@ fn emit_polonius_dump<'tcx>(
         closure_region_requirements,
         out,
     )?;
-    writeln!(out, "</pre></code>")?;
+    writeln!(out, "</code></pre>")?;
     writeln!(out, "</div>")?;

     // Section 2: mermaid visualization of the CFG.
     writeln!(out, "<div>")?;
     writeln!(out, "Control-flow graph")?;
-    writeln!(out, "<code><pre class='mermaid'>")?;
+    writeln!(out, "<pre class='mermaid'>")?;
     emit_mermaid_cfg(body, out)?;
-    writeln!(out, "</pre></code>")?;
+    writeln!(out, "</pre>")?;
+    writeln!(out, "</div>")?;
+
+    // Section 3: mermaid visualization of the NLL region graph.
+    writeln!(out, "<div>")?;
+    writeln!(out, "NLL regions")?;
+    writeln!(out, "<pre class='mermaid'>")?;
+    emit_mermaid_nll_regions(regioncx, out)?;
+    writeln!(out, "</pre>")?;
+    writeln!(out, "</div>")?;
+
+    // Section 4: mermaid visualization of the NLL SCC graph.
+    writeln!(out, "<div>")?;
+    writeln!(out, "NLL SCCs")?;
+    writeln!(out, "<pre class='mermaid'>")?;
+    emit_mermaid_nll_sccs(regioncx, out)?;
+    writeln!(out, "</pre>")?;
     writeln!(out, "</div>")?;

     // Finalize the dump with the HTML epilogue.
@@ -261,3 +283,112 @@ fn emit_mermaid_cfg(body: &Body<'_>, out: &mut dyn io::Write) -> io::Result<()>

     Ok(())
 }
+
+/// Emits a region's label: index, universe, external name.
+fn render_region(
+    region: RegionVid,
+    regioncx: &RegionInferenceContext<'_>,
+    out: &mut dyn io::Write,
+) -> io::Result<()> {
+    let def = regioncx.region_definition(region);
+    let universe = def.universe;
+
+    write!(out, "'{}", region.as_usize())?;
+    if !universe.is_root() {
+        write!(out, "/{universe:?}")?;
+    }
+    if let Some(name) = def.external_name.and_then(|e| e.get_name()) {
+        write!(out, " ({name})")?;
+    }
+    Ok(())
+}
+
+/// Emits a mermaid flowchart of the NLL regions and the outlives constraints between them, similar
+/// to the graphviz version.
+fn emit_mermaid_nll_regions<'tcx>(
+    regioncx: &RegionInferenceContext<'tcx>,
+    out: &mut dyn io::Write,
+) -> io::Result<()> {
+    // The mermaid chart type: a top-down flowchart.
+    writeln!(out, "flowchart TD")?;
+
+    // Emit the region nodes.
+    for region in regioncx.var_infos.indices() {
+        write!(out, "{}[\"", region.as_usize())?;
+        render_region(region, regioncx, out)?;
+        writeln!(out, "\"]")?;
+    }
+
+    // Get a set of edges to check for the reverse edge being present.
+    let edges: FxHashSet<_> = regioncx.outlives_constraints().map(|c| (c.sup, c.sub)).collect();
+
+    // Order (and deduplicate) edges for traversal, to display them in a generally increasing order.
+    let constraint_key = |c: &OutlivesConstraint<'_>| {
+        let min = c.sup.min(c.sub);
+        let max = c.sup.max(c.sub);
+        (min, max)
+    };
+    let mut ordered_edges: Vec<_> = regioncx.outlives_constraints().collect();
+    ordered_edges.sort_by_key(|c| constraint_key(c));
+    ordered_edges.dedup_by_key(|c| constraint_key(c));
+
+    for outlives in ordered_edges {
+        // Source node.
+        write!(out, "{} ", outlives.sup.as_usize())?;
+
+        // The kind of arrow: bidirectional if the opposite edge exists in the set.
+        if edges.contains(&(outlives.sub, outlives.sup)) {
+            write!(out, "<")?;
+        }
+        write!(out, "-- ")?;
+
+        // Edge label from its `Locations`.
+        match outlives.locations {
+            Locations::All(_) => write!(out, "All")?,
+            Locations::Single(location) => write!(out, "{:?}", location)?,
+        }
+
+        // Target node.
+        writeln!(out, " --> {}", outlives.sub.as_usize())?;
+    }
+    Ok(())
+}
+
+/// Emits a mermaid flowchart of the NLL SCCs and the outlives constraints between them, similar
+/// to the graphviz version.
+fn emit_mermaid_nll_sccs<'tcx>(
+    regioncx: &RegionInferenceContext<'tcx>,
+    out: &mut dyn io::Write,
+) -> io::Result<()> {
+    // The mermaid chart type: a top-down flowchart.
+    writeln!(out, "flowchart TD")?;
+
+    // Gather and emit the SCC nodes.
+    let mut nodes_per_scc: IndexVec<_, _> =
+        regioncx.constraint_sccs().all_sccs().map(|_| Vec::new()).collect();
+    for region in regioncx.var_infos.indices() {
+        let scc = regioncx.constraint_sccs().scc(region);
+        nodes_per_scc[scc].push(region);
+    }
+    for (scc, regions) in nodes_per_scc.iter_enumerated() {
+        // The node label: the regions contained in the SCC.
+        write!(out, "{scc}[\"SCC({scc}) = {{", scc = scc.as_usize())?;
+        for (idx, &region) in regions.iter().enumerate() {
+            render_region(region, regioncx, out)?;
+            if idx < regions.len() - 1 {
+                write!(out, ",")?;
+            }
+        }
+        writeln!(out, "}}\"]")?;
+    }
+
+    // Emit the edges between SCCs.
+    let edges = regioncx.constraint_sccs().all_sccs().flat_map(|source| {
+        regioncx.constraint_sccs().successors(source).iter().map(move |&target| (source, target))
+    });
+    for (source, target) in edges {
+        writeln!(out, "{} --> {}", source.as_usize(), target.as_usize())?;
+    }
+
+    Ok(())
+}
@@ -651,7 +651,7 @@ fn expand_preparsed_asm(
         .map(|span| template_span.from_inner(InnerSpan::new(span.start, span.end)));
     for piece in unverified_pieces {
         match piece {
-            parse::Piece::String(s) => {
+            parse::Piece::Lit(s) => {
                 template.push(ast::InlineAsmTemplatePiece::String(s.to_string().into()))
             }
             parse::Piece::NextArgument(arg) => {
@@ -406,7 +406,7 @@ fn make_format_args(

     for piece in &pieces {
         match *piece {
-            parse::Piece::String(s) => {
+            parse::Piece::Lit(s) => {
                 unfinished_literal.push_str(s);
             }
             parse::Piece::NextArgument(box parse::Argument { position, position_span, format }) => {
@@ -1,10 +1,8 @@
-use std::iter::FromIterator;
-
 #[cfg(feature = "master")]
 use gccjit::Context;
 use rustc_codegen_ssa::codegen_attrs::check_tied_features;
 use rustc_codegen_ssa::errors::TargetFeatureDisableOrEnable;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::unord::UnordSet;
 use rustc_session::Session;
 use rustc_target::target_features::RUSTC_SPECIFIC_FEATURES;
@@ -45,12 +43,6 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<Stri
     let known_features = sess.target.rust_target_features();
     let mut featsmap = FxHashMap::default();

-    // Ensure that all ABI-required features are enabled, and the ABI-forbidden ones
-    // are disabled.
-    let abi_feature_constraints = sess.target.abi_required_features();
-    let abi_incompatible_set =
-        FxHashSet::from_iter(abi_feature_constraints.incompatible.iter().copied());
-
     // Compute implied features
     let mut all_rust_features = vec![];
     for feature in sess.opts.cg.target_feature.split(',') {
@@ -117,51 +109,11 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<Stri
                 }
             }

-            // Ensure that the features we enable/disable are compatible with the ABI.
-            if enable {
-                if abi_incompatible_set.contains(feature) {
-                    sess.dcx().emit_warn(ForbiddenCTargetFeature {
-                        feature,
-                        enabled: "enabled",
-                        reason: "this feature is incompatible with the target ABI",
-                    });
-                }
-            } else {
-                // FIXME: we have to request implied features here since
-                // negative features do not handle implied features above.
-                for &required in abi_feature_constraints.required.iter() {
-                    let implied = sess.target.implied_target_features(std::iter::once(required));
-                    if implied.contains(feature) {
-                        sess.dcx().emit_warn(ForbiddenCTargetFeature {
-                            feature,
-                            enabled: "disabled",
-                            reason: "this feature is required by the target ABI",
-                        });
-                    }
-                }
-            }
-
             // FIXME(nagisa): figure out how to not allocate a full hashset here.
             featsmap.insert(feature, enable);
         }
     }

-    // To be sure the ABI-relevant features are all in the right state, we explicitly
-    // (un)set them here. This means if the target spec sets those features wrong,
-    // we will silently correct them rather than silently producing wrong code.
-    // (The target sanity check tries to catch this, but we can't know which features are
-    // enabled in GCC by default so we can't be fully sure about that check.)
-    // We add these at the beginning of the list so that `-Ctarget-features` can
-    // still override it... that's unsound, but more compatible with past behavior.
-    all_rust_features.splice(
-        0..0,
-        abi_feature_constraints
-            .required
-            .iter()
-            .map(|&f| (true, f))
-            .chain(abi_feature_constraints.incompatible.iter().map(|&f| (false, f))),
-    );
-
     // Translate this into GCC features.
     let feats =
         all_rust_features.iter().flat_map(|&(enable, feature)| {
@@ -493,9 +493,10 @@ fn target_features_cfg(
     sess.target
         .rust_target_features()
         .iter()
-        .filter(|&&(_, gate, _)| gate.in_cfg())
         .filter_map(|&(feature, gate, _)| {
-            if sess.is_nightly_build() || allow_unstable || gate.requires_nightly().is_none() {
+            if allow_unstable
+                || (gate.in_cfg() && (sess.is_nightly_build() || gate.requires_nightly().is_none()))
+            {
                 Some(feature)
             } else {
                 None
@@ -319,7 +319,6 @@ pub fn target_features_cfg(sess: &Session, allow_unstable: bool) -> Vec<Symbol>
     sess.target
         .rust_target_features()
         .iter()
-        .filter(|(_, gate, _)| gate.in_cfg())
         .filter(|(feature, _, _)| {
             // skip checking special features, as LLVM may not understand them
             if RUSTC_SPECIAL_FEATURES.contains(feature) {
@@ -388,9 +387,13 @@ pub fn target_features_cfg(sess: &Session, allow_unstable: bool) -> Vec<Symbol>
     sess.target
         .rust_target_features()
        .iter()
-        .filter(|(_, gate, _)| gate.in_cfg())
         .filter_map(|(feature, gate, _)| {
-            if sess.is_nightly_build() || allow_unstable || gate.requires_nightly().is_none() {
+            // The `allow_unstable` set is used by rustc internally to determined which target
+            // features are truly available, so we want to return even perma-unstable "forbidden"
+            // features.
+            if allow_unstable
+                || (gate.in_cfg() && (sess.is_nightly_build() || gate.requires_nightly().is_none()))
+            {
                 Some(*feature)
             } else {
                 None
@@ -670,12 +673,6 @@ pub(crate) fn global_llvm_features(
     // Will only be filled when `diagnostics` is set!
     let mut featsmap = FxHashMap::default();

-    // Ensure that all ABI-required features are enabled, and the ABI-forbidden ones
-    // are disabled.
-    let abi_feature_constraints = sess.target.abi_required_features();
-    let abi_incompatible_set =
-        FxHashSet::from_iter(abi_feature_constraints.incompatible.iter().copied());
-
     // Compute implied features
     let mut all_rust_features = vec![];
     for feature in sess.opts.cg.target_feature.split(',') {
@@ -746,52 +743,11 @@ pub(crate) fn global_llvm_features(
                 }
             }

-            // Ensure that the features we enable/disable are compatible with the ABI.
-            if enable {
-                if abi_incompatible_set.contains(feature) {
-                    sess.dcx().emit_warn(ForbiddenCTargetFeature {
-                        feature,
-                        enabled: "enabled",
-                        reason: "this feature is incompatible with the target ABI",
-                    });
-                }
-            } else {
-                // FIXME: we have to request implied features here since
-                // negative features do not handle implied features above.
-                for &required in abi_feature_constraints.required.iter() {
-                    let implied =
-                        sess.target.implied_target_features(std::iter::once(required));
-                    if implied.contains(feature) {
-                        sess.dcx().emit_warn(ForbiddenCTargetFeature {
-                            feature,
-                            enabled: "disabled",
-                            reason: "this feature is required by the target ABI",
-                        });
-                    }
-                }
-            }
-
             // FIXME(nagisa): figure out how to not allocate a full hashset here.
             featsmap.insert(feature, enable);
         }
     }

-    // To be sure the ABI-relevant features are all in the right state, we explicitly
-    // (un)set them here. This means if the target spec sets those features wrong,
-    // we will silently correct them rather than silently producing wrong code.
-    // (The target sanity check tries to catch this, but we can't know which features are
-    // enabled in LLVM by default so we can't be fully sure about that check.)
-    // We add these at the beginning of the list so that `-Ctarget-features` can
-    // still override it... that's unsound, but more compatible with past behavior.
-    all_rust_features.splice(
-        0..0,
-        abi_feature_constraints
-            .required
-            .iter()
-            .map(|&f| (true, f))
-            .chain(abi_feature_constraints.incompatible.iter().map(|&f| (false, f))),
-    );
-
     // Translate this into LLVM features.
     let feats = all_rust_features
         .iter()
@@ -457,7 +457,7 @@ fn fn_sig_suggestion<'tcx>(

     let asyncness = if tcx.asyncness(assoc.def_id).is_async() {
         output = if let ty::Alias(_, alias_ty) = *output.kind() {
-            tcx.explicit_item_super_predicates(alias_ty.def_id)
+            tcx.explicit_item_self_bounds(alias_ty.def_id)
                 .iter_instantiated_copied(tcx, alias_ty.args)
                 .find_map(|(bound, _)| {
                     bound.as_projection_clause()?.no_bound_vars()?.term.as_type()
@@ -65,9 +65,9 @@ pub fn provide(providers: &mut Providers) {
         type_alias_is_lazy: type_of::type_alias_is_lazy,
         item_bounds: item_bounds::item_bounds,
         explicit_item_bounds: item_bounds::explicit_item_bounds,
-        item_super_predicates: item_bounds::item_super_predicates,
-        explicit_item_super_predicates: item_bounds::explicit_item_super_predicates,
-        item_non_self_assumptions: item_bounds::item_non_self_assumptions,
+        item_self_bounds: item_bounds::item_self_bounds,
+        explicit_item_self_bounds: item_bounds::explicit_item_self_bounds,
+        item_non_self_bounds: item_bounds::item_non_self_bounds,
         impl_super_outlives: item_bounds::impl_super_outlives,
         generics_of: generics_of::generics_of,
         predicates_of: predicates_of::predicates_of,
@@ -328,9 +328,9 @@ impl<'tcx> Visitor<'tcx> for CollectItemTypesVisitor<'tcx> {
             self.tcx.ensure().generics_of(def_id);
             self.tcx.ensure().predicates_of(def_id);
             self.tcx.ensure().explicit_item_bounds(def_id);
-            self.tcx.ensure().explicit_item_super_predicates(def_id);
+            self.tcx.ensure().explicit_item_self_bounds(def_id);
             self.tcx.ensure().item_bounds(def_id);
-            self.tcx.ensure().item_super_predicates(def_id);
+            self.tcx.ensure().item_self_bounds(def_id);
             if self.tcx.is_conditionally_const(def_id) {
                 self.tcx.ensure().explicit_implied_const_bounds(def_id);
                 self.tcx.ensure().const_conditions(def_id);
@@ -822,7 +822,7 @@ fn lower_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::TraitItemId) {

         hir::TraitItemKind::Type(_, Some(_)) => {
             tcx.ensure().item_bounds(def_id);
-            tcx.ensure().item_super_predicates(def_id);
+            tcx.ensure().item_self_bounds(def_id);
             tcx.ensure().type_of(def_id);
             // Account for `type T = _;`.
             let mut visitor = HirPlaceholderCollector::default();
@@ -839,7 +839,7 @@ fn lower_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::TraitItemId) {

         hir::TraitItemKind::Type(_, None) => {
             tcx.ensure().item_bounds(def_id);
-            tcx.ensure().item_super_predicates(def_id);
+            tcx.ensure().item_self_bounds(def_id);
             // #74612: Visit and try to find bad placeholders
             // even if there is no concrete type.
             let mut visitor = HirPlaceholderCollector::default();
@@ -350,7 +350,7 @@ pub(super) fn explicit_item_bounds(
     explicit_item_bounds_with_filter(tcx, def_id, PredicateFilter::All)
 }

-pub(super) fn explicit_item_super_predicates(
+pub(super) fn explicit_item_self_bounds(
     tcx: TyCtxt<'_>,
     def_id: LocalDefId,
 ) -> ty::EarlyBinder<'_, &'_ [(ty::Clause<'_>, Span)]> {
@@ -434,11 +434,11 @@ pub(super) fn item_bounds(tcx: TyCtxt<'_>, def_id: DefId) -> ty::EarlyBinder<'_,
     })
 }

-pub(super) fn item_super_predicates(
+pub(super) fn item_self_bounds(
     tcx: TyCtxt<'_>,
     def_id: DefId,
 ) -> ty::EarlyBinder<'_, ty::Clauses<'_>> {
-    tcx.explicit_item_super_predicates(def_id).map_bound(|bounds| {
+    tcx.explicit_item_self_bounds(def_id).map_bound(|bounds| {
         tcx.mk_clauses_from_iter(
             util::elaborate(tcx, bounds.iter().map(|&(bound, _span)| bound)).filter_only_self(),
         )
@@ -447,13 +447,12 @@ pub(super) fn item_super_predicates(

 /// This exists as an optimization to compute only the item bounds of the item
 /// that are not `Self` bounds.
-pub(super) fn item_non_self_assumptions(
+pub(super) fn item_non_self_bounds(
     tcx: TyCtxt<'_>,
     def_id: DefId,
 ) -> ty::EarlyBinder<'_, ty::Clauses<'_>> {
     let all_bounds: FxIndexSet<_> = tcx.item_bounds(def_id).skip_binder().iter().collect();
-    let own_bounds: FxIndexSet<_> =
-        tcx.item_super_predicates(def_id).skip_binder().iter().collect();
+    let own_bounds: FxIndexSet<_> = tcx.item_self_bounds(def_id).skip_binder().iter().collect();
     if all_bounds.len() == own_bounds.len() {
         ty::EarlyBinder::bind(ty::ListWithCachedTypeInfo::empty())
     } else {
@@ -308,7 +308,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 expected_ty,
                 closure_kind,
                 self.tcx
-                    .explicit_item_super_predicates(def_id)
+                    .explicit_item_self_bounds(def_id)
                     .iter_instantiated_copied(self.tcx, args)
                     .map(|(c, s)| (c.as_predicate(), s)),
             ),
@@ -1019,7 +1019,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             }
             ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => self
                 .tcx
-                .explicit_item_super_predicates(def_id)
+                .explicit_item_self_bounds(def_id)
                 .iter_instantiated_copied(self.tcx, args)
                 .find_map(|(p, s)| get_future_output(p.as_predicate(), s))?,
             ty::Error(_) => return Some(ret_ty),
@@ -1847,19 +1847,16 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
         fcx.probe(|_| {
             let ocx = ObligationCtxt::new(fcx);
             ocx.register_obligations(
-                fcx.tcx.item_super_predicates(rpit_def_id).iter_identity().filter_map(
-                    |clause| {
+                fcx.tcx.item_self_bounds(rpit_def_id).iter_identity().filter_map(|clause| {
                     let predicate = clause
                         .kind()
                         .map_bound(|clause| match clause {
-                            ty::ClauseKind::Trait(trait_pred) => Some(
-                                ty::ClauseKind::Trait(trait_pred.with_self_ty(fcx.tcx, ty)),
+                            ty::ClauseKind::Trait(trait_pred) => Some(ty::ClauseKind::Trait(
+                                trait_pred.with_self_ty(fcx.tcx, ty),
+                            )),
+                            ty::ClauseKind::Projection(proj_pred) => Some(
+                                ty::ClauseKind::Projection(proj_pred.with_self_ty(fcx.tcx, ty)),
                             ),
-                            ty::ClauseKind::Projection(proj_pred) => {
-                                Some(ty::ClauseKind::Projection(
-                                    proj_pred.with_self_ty(fcx.tcx, ty),
-                                ))
-                            }
                             _ => None,
                         })
                         .transpose()?;
@@ -1869,8 +1866,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
                         fcx.param_env,
                         predicate,
                     ))
-                    },
-                ),
+                }),
             );
             ocx.select_where_possible().is_empty()
         })
@@ -281,7 +281,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
         alias_ty: ty::AliasTy<'tcx>,
     ) -> impl Iterator<Item = ty::Region<'tcx>> {
         let tcx = self.tcx;
-        let bounds = tcx.item_super_predicates(alias_ty.def_id);
+        let bounds = tcx.item_self_bounds(alias_ty.def_id);
         trace!("{:#?}", bounds.skip_binder());
         bounds
             .iter_instantiated(tcx, alias_ty.args)
@@ -1,3 +1,8 @@
+interface_abi_required_feature =
+    target feature `{$feature}` must be {$enabled} to ensure that the ABI of the current target can be implemented correctly
+    .note = this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+interface_abi_required_feature_issue = for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>
+
 interface_cant_emit_mir =
     could not emit MIR: {$error}

@@ -103,3 +103,12 @@ pub struct IgnoringOutDir;
 #[derive(Diagnostic)]
 #[diag(interface_multiple_output_types_to_stdout)]
 pub struct MultipleOutputTypesToStdout;
+
+#[derive(Diagnostic)]
+#[diag(interface_abi_required_feature)]
+#[note]
+#[note(interface_abi_required_feature_issue)]
+pub(crate) struct AbiRequiredTargetFeature<'a> {
+    pub feature: &'a str,
+    pub enabled: &'a str,
+}
@@ -492,6 +492,8 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
         }
         sess.lint_store = Some(Lrc::new(lint_store));

+        util::check_abi_required_features(&sess);
+
         let compiler = Compiler {
             sess,
             codegen_backend,
@@ -18,21 +18,25 @@ use rustc_session::{EarlyDiagCtxt, Session, filesearch};
 use rustc_span::edit_distance::find_best_match_for_name;
 use rustc_span::edition::Edition;
 use rustc_span::source_map::SourceMapInputs;
-use rustc_span::sym;
+use rustc_span::{Symbol, sym};
 use rustc_target::spec::Target;
 use tracing::info;

 use crate::errors;

 /// Function pointer type that constructs a new CodegenBackend.
-pub type MakeBackendFn = fn() -> Box<dyn CodegenBackend>;
+type MakeBackendFn = fn() -> Box<dyn CodegenBackend>;

 /// Adds `target_feature = "..."` cfgs for a variety of platform
 /// specific features (SSE, NEON etc.).
 ///
 /// This is performed by checking whether a set of permitted features
 /// is available on the target machine, by querying the codegen backend.
-pub fn add_configuration(cfg: &mut Cfg, sess: &mut Session, codegen_backend: &dyn CodegenBackend) {
+pub(crate) fn add_configuration(
+    cfg: &mut Cfg,
+    sess: &mut Session,
+    codegen_backend: &dyn CodegenBackend,
+) {
     let tf = sym::target_feature;

     let unstable_target_features = codegen_backend.target_features_cfg(sess, true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Ensures that all target features required by the ABI are present.
|
||||||
|
/// Must be called after `unstable_target_features` has been populated!
|
||||||
|
pub(crate) fn check_abi_required_features(sess: &Session) {
|
||||||
|
let abi_feature_constraints = sess.target.abi_required_features();
|
||||||
|
// We check this against `unstable_target_features` as that is conveniently already
|
||||||
|
// back-translated to rustc feature names, taking into account `-Ctarget-cpu` and `-Ctarget-feature`.
|
||||||
|
// Just double-check that the features we care about are actually on our list.
|
||||||
|
for feature in
|
||||||
|
abi_feature_constraints.required.iter().chain(abi_feature_constraints.incompatible.iter())
|
||||||
|
{
|
||||||
|
assert!(
|
||||||
|
sess.target.rust_target_features().iter().any(|(name, ..)| feature == name),
|
||||||
|
"target feature {feature} is required/incompatible for the current ABI but not a recognized feature for this target"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
for feature in abi_feature_constraints.required {
|
||||||
|
if !sess.unstable_target_features.contains(&Symbol::intern(feature)) {
|
||||||
|
sess.dcx().emit_warn(errors::AbiRequiredTargetFeature { feature, enabled: "enabled" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for feature in abi_feature_constraints.incompatible {
|
||||||
|
if sess.unstable_target_features.contains(&Symbol::intern(feature)) {
|
||||||
|
sess.dcx().emit_warn(errors::AbiRequiredTargetFeature { feature, enabled: "disabled" });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub static STACK_SIZE: OnceLock<usize> = OnceLock::new();
|
pub static STACK_SIZE: OnceLock<usize> = OnceLock::new();
|
||||||
pub const DEFAULT_STACK_SIZE: usize = 8 * 1024 * 1024;
|
pub const DEFAULT_STACK_SIZE: usize = 8 * 1024 * 1024;
|
||||||
|
|
||||||
|
|
|
@@ -330,10 +330,12 @@ impl EarlyLintPass for UnsafeCode {
         if let FnKind::Fn(
             ctxt,
             _,
-            ast::FnSig { header: ast::FnHeader { safety: ast::Safety::Unsafe(_), .. }, .. },
-            _,
             _,
+            ast::Fn {
+                sig: ast::FnSig { header: ast::FnHeader { safety: ast::Safety::Unsafe(_), .. }, .. },
                 body,
+                ..
+            },
         ) = fk
         {
             let decorator = match ctxt {
@@ -289,10 +289,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
             }
             ty::Adt(def, _) => is_def_must_use(cx, def.did(), span),
             ty::Alias(ty::Opaque | ty::Projection, ty::AliasTy { def_id: def, .. }) => {
-                elaborate(
-                    cx.tcx,
-                    cx.tcx.explicit_item_super_predicates(def).iter_identity_copied(),
-                )
+                elaborate(cx.tcx, cx.tcx.explicit_item_self_bounds(def).iter_identity_copied())
                 // We only care about self bounds for the impl-trait
                 .filter_only_self()
                 .find_map(|(pred, _span)| {
@@ -241,7 +241,7 @@ impl IntoArgs for (CrateNum, SimplifiedType) {

 provide! { tcx, def_id, other, cdata,
     explicit_item_bounds => { table_defaulted_array }
-    explicit_item_super_predicates => { table_defaulted_array }
+    explicit_item_self_bounds => { table_defaulted_array }
     explicit_predicates_of => { table }
     generics_of => { table }
     inferred_outlives_of => { table_defaulted_array }
@@ -1554,7 +1554,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         }
         if let DefKind::OpaqueTy = def_kind {
             self.encode_explicit_item_bounds(def_id);
-            self.encode_explicit_item_super_predicates(def_id);
+            self.encode_explicit_item_self_bounds(def_id);
             record!(self.tables.opaque_ty_origin[def_id] <- self.tcx.opaque_ty_origin(def_id));
             self.encode_precise_capturing_args(def_id);
             if tcx.is_conditionally_const(def_id) {
@@ -1667,10 +1667,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         record_defaulted_array!(self.tables.explicit_item_bounds[def_id] <- bounds);
     }

-    fn encode_explicit_item_super_predicates(&mut self, def_id: DefId) {
-        debug!("EncodeContext::encode_explicit_item_super_predicates({:?})", def_id);
-        let bounds = self.tcx.explicit_item_super_predicates(def_id).skip_binder();
-        record_defaulted_array!(self.tables.explicit_item_super_predicates[def_id] <- bounds);
+    fn encode_explicit_item_self_bounds(&mut self, def_id: DefId) {
+        debug!("EncodeContext::encode_explicit_item_self_bounds({:?})", def_id);
+        let bounds = self.tcx.explicit_item_self_bounds(def_id).skip_binder();
+        record_defaulted_array!(self.tables.explicit_item_self_bounds[def_id] <- bounds);
     }

     #[instrument(level = "debug", skip(self))]
@ -1685,7 +1685,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
             AssocItemContainer::Trait => {
                 if let ty::AssocKind::Type = item.kind {
                     self.encode_explicit_item_bounds(def_id);
-                    self.encode_explicit_item_super_predicates(def_id);
+                    self.encode_explicit_item_self_bounds(def_id);
                     if tcx.is_conditionally_const(def_id) {
                         record_defaulted_array!(self.tables.explicit_implied_const_bounds[def_id]
                             <- self.tcx.explicit_implied_const_bounds(def_id).skip_binder());
@ -386,7 +386,7 @@ define_tables! {
     // corresponding DefPathHash.
     def_path_hashes: Table<DefIndex, u64>,
     explicit_item_bounds: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
-    explicit_item_super_predicates: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
+    explicit_item_self_bounds: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
     inferred_outlives_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
     explicit_super_predicates_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
     explicit_implied_predicates_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
@ -393,7 +393,7 @@ rustc_queries! {
     /// like closure signature deduction.
     ///
     /// [explicit item bounds]: Self::explicit_item_bounds
-    query explicit_item_super_predicates(key: DefId) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
+    query explicit_item_self_bounds(key: DefId) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
         desc { |tcx| "finding item bounds for `{}`", tcx.def_path_str(key) }
         cache_on_disk_if { key.is_local() }
         separate_provide_extern
@ -427,11 +427,11 @@ rustc_queries! {
         desc { |tcx| "elaborating item bounds for `{}`", tcx.def_path_str(key) }
     }

-    query item_super_predicates(key: DefId) -> ty::EarlyBinder<'tcx, ty::Clauses<'tcx>> {
+    query item_self_bounds(key: DefId) -> ty::EarlyBinder<'tcx, ty::Clauses<'tcx>> {
         desc { |tcx| "elaborating item assumptions for `{}`", tcx.def_path_str(key) }
     }

-    query item_non_self_assumptions(key: DefId) -> ty::EarlyBinder<'tcx, ty::Clauses<'tcx>> {
+    query item_non_self_bounds(key: DefId) -> ty::EarlyBinder<'tcx, ty::Clauses<'tcx>> {
         desc { |tcx| "elaborating item assumptions for `{}`", tcx.def_path_str(key) }
     }

@ -345,6 +345,20 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         self.item_bounds(def_id).map_bound(IntoIterator::into_iter)
     }

+    fn item_self_bounds(
+        self,
+        def_id: DefId,
+    ) -> ty::EarlyBinder<'tcx, impl IntoIterator<Item = ty::Clause<'tcx>>> {
+        self.item_self_bounds(def_id).map_bound(IntoIterator::into_iter)
+    }
+
+    fn item_non_self_bounds(
+        self,
+        def_id: DefId,
+    ) -> ty::EarlyBinder<'tcx, impl IntoIterator<Item = ty::Clause<'tcx>>> {
+        self.item_non_self_bounds(def_id).map_bound(IntoIterator::into_iter)
+    }
+
     fn predicates_of(
         self,
         def_id: DefId,
@ -2577,7 +2591,7 @@ impl<'tcx> TyCtxt<'tcx> {
         let ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) = ty.kind() else { return false };
         let future_trait = self.require_lang_item(LangItem::Future, None);

-        self.explicit_item_super_predicates(def_id).skip_binder().iter().any(|&(predicate, _)| {
+        self.explicit_item_self_bounds(def_id).skip_binder().iter().any(|&(predicate, _)| {
             let ty::ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder() else {
                 return false;
             };
@ -64,7 +64,7 @@ impl<'tcx> TyCtxt<'tcx> {
         args: ty::GenericArgsRef<'tcx>,
     ) -> &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>> {
         let mut bounds: Vec<_> = self
-            .item_super_predicates(def_id)
+            .item_self_bounds(def_id)
             .iter_instantiated(self, args)
             .filter_map(|clause| {
                 clause
@ -19,6 +19,11 @@ use crate::solve::{
     MaybeCause, NoSolution, QueryResult,
 };

+enum AliasBoundKind {
+    SelfBounds,
+    NonSelfBounds,
+}
+
 /// A candidate is a possible way to prove a goal.
 ///
 /// It consists of both the `source`, which describes how that goal would be proven,
@ -510,7 +515,12 @@ where
         candidates: &mut Vec<Candidate<I>>,
     ) {
         let () = self.probe(|_| ProbeKind::NormalizedSelfTyAssembly).enter(|ecx| {
-            ecx.assemble_alias_bound_candidates_recur(goal.predicate.self_ty(), goal, candidates);
+            ecx.assemble_alias_bound_candidates_recur(
+                goal.predicate.self_ty(),
+                goal,
+                candidates,
+                AliasBoundKind::SelfBounds,
+            );
         });
     }

@ -528,6 +538,7 @@ where
         self_ty: I::Ty,
         goal: Goal<I, G>,
         candidates: &mut Vec<Candidate<I>>,
+        consider_self_bounds: AliasBoundKind,
     ) {
         let (kind, alias_ty) = match self_ty.kind() {
             ty::Bool
@ -580,8 +591,12 @@ where
             }
         };

-        for assumption in
-            self.cx().item_bounds(alias_ty.def_id).iter_instantiated(self.cx(), alias_ty.args)
+        match consider_self_bounds {
+            AliasBoundKind::SelfBounds => {
+                for assumption in self
+                    .cx()
+                    .item_self_bounds(alias_ty.def_id)
+                    .iter_instantiated(self.cx(), alias_ty.args)
         {
             candidates.extend(G::probe_and_consider_implied_clause(
                 self,
@ -591,6 +606,23 @@ where
                 [],
             ));
         }
+            }
+            AliasBoundKind::NonSelfBounds => {
+                for assumption in self
+                    .cx()
+                    .item_non_self_bounds(alias_ty.def_id)
+                    .iter_instantiated(self.cx(), alias_ty.args)
+                {
+                    candidates.extend(G::probe_and_consider_implied_clause(
+                        self,
+                        CandidateSource::AliasBound,
+                        goal,
+                        assumption,
+                        [],
+                    ));
+                }
+            }
+        }

         candidates.extend(G::consider_additional_alias_assumptions(self, goal, alias_ty));

@ -600,9 +632,12 @@ where

         // Recurse on the self type of the projection.
         match self.structurally_normalize_ty(goal.param_env, alias_ty.self_ty()) {
-            Ok(next_self_ty) => {
-                self.assemble_alias_bound_candidates_recur(next_self_ty, goal, candidates)
-            }
+            Ok(next_self_ty) => self.assemble_alias_bound_candidates_recur(
+                next_self_ty,
+                goal,
+                candidates,
+                AliasBoundKind::NonSelfBounds,
+            ),
             Err(NoSolution) => {}
         }
     }
@ -16,11 +16,8 @@
 #![warn(unreachable_pub)]
 // tidy-alphabetical-end

-use std::{iter, str, string};
-
 pub use Alignment::*;
 pub use Count::*;
-pub use Piece::*;
 pub use Position::*;
 use rustc_lexer::unescape;

@ -86,7 +83,7 @@ impl InnerOffset {
 #[derive(Clone, Debug, PartialEq)]
 pub enum Piece<'a> {
     /// A literal string which should directly be emitted
-    String(&'a str),
+    Lit(&'a str),
     /// This describes that formatting should process the next argument (as
     /// specified inside) for emission.
     NextArgument(Box<Argument<'a>>),
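Note on the `Piece::String` -> `Piece::Lit` rename above: it is a pure rename, so downstream
users of `rustc_parse_format` only need to adjust their `match` arms. A minimal sketch of such a
caller, assuming the crate's public API exactly as shown in this diff (the `count_literal_chars`
helper itself is hypothetical):

    use rustc_parse_format::{ParseMode, Parser, Piece};

    // Sums the length of the literal text in a format string, skipping `{...}`
    // placeholders. Iterating a `Parser` yields `Piece` values.
    fn count_literal_chars(fmt: &str) -> usize {
        Parser::new(fmt, None, None, false, ParseMode::Format)
            .map(|piece| match piece {
                Piece::Lit(s) => s.len(),    // was `Piece::String(s)` before this change
                Piece::NextArgument(_) => 0, // placeholder pieces carry no literal text
            })
            .sum()
    }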
@ -205,11 +202,11 @@ pub enum Count<'a> {
 }

 pub struct ParseError {
-    pub description: string::String,
-    pub note: Option<string::String>,
-    pub label: string::String,
+    pub description: String,
+    pub note: Option<String>,
+    pub label: String,
     pub span: InnerSpan,
-    pub secondary_label: Option<(string::String, InnerSpan)>,
+    pub secondary_label: Option<(String, InnerSpan)>,
     pub suggestion: Suggestion,
 }

@ -225,7 +222,7 @@ pub enum Suggestion {
     /// `format!("{foo:?#}")` -> `format!("{foo:#?}")`
     /// `format!("{foo:?x}")` -> `format!("{foo:x?}")`
     /// `format!("{foo:?X}")` -> `format!("{foo:X?}")`
-    ReorderFormatParameter(InnerSpan, string::String),
+    ReorderFormatParameter(InnerSpan, String),
 }

 /// The parser structure for interpreting the input format string. This is
@ -237,7 +234,7 @@ pub enum Suggestion {
 pub struct Parser<'a> {
     mode: ParseMode,
     input: &'a str,
-    cur: iter::Peekable<str::CharIndices<'a>>,
+    cur: std::iter::Peekable<std::str::CharIndices<'a>>,
     /// Error messages accumulated during parsing
     pub errors: Vec<ParseError>,
     /// Current position of implicit positional argument pointer
@ -278,7 +275,7 @@ impl<'a> Iterator for Parser<'a> {
                 if self.consume('{') {
                     self.last_opening_brace = curr_last_brace;

-                    Some(String(self.string(pos + 1)))
+                    Some(Piece::Lit(self.string(pos + 1)))
                 } else {
                     let arg = self.argument(lbrace_end);
                     if let Some(rbrace_pos) = self.consume_closing_brace(&arg) {
@ -299,13 +296,13 @@ impl<'a> Iterator for Parser<'a> {
                             _ => self.suggest_positional_arg_instead_of_captured_arg(arg),
                         }
                     }
-                    Some(NextArgument(Box::new(arg)))
+                    Some(Piece::NextArgument(Box::new(arg)))
                 }
             }
             '}' => {
                 self.cur.next();
                 if self.consume('}') {
-                    Some(String(self.string(pos + 1)))
+                    Some(Piece::Lit(self.string(pos + 1)))
                 } else {
                     let err_pos = self.to_span_index(pos);
                     self.err_with_note(
@ -317,7 +314,7 @@ impl<'a> Iterator for Parser<'a> {
                     None
                 }
             }
-            _ => Some(String(self.string(pos))),
+            _ => Some(Piece::Lit(self.string(pos))),
             }
         } else {
             if self.is_source_literal {
@ -336,7 +333,7 @@ impl<'a> Parser<'a> {
     pub fn new(
         s: &'a str,
         style: Option<usize>,
-        snippet: Option<string::String>,
+        snippet: Option<String>,
         append_newline: bool,
         mode: ParseMode,
     ) -> Parser<'a> {
@ -366,7 +363,7 @@ impl<'a> Parser<'a> {
     /// Notifies of an error. The message doesn't actually need to be of type
     /// String, but I think it does when this eventually uses conditions so it
     /// might as well start using it now.
-    fn err<S1: Into<string::String>, S2: Into<string::String>>(
+    fn err<S1: Into<String>, S2: Into<String>>(
         &mut self,
         description: S1,
         label: S2,
@ -385,11 +382,7 @@ impl<'a> Parser<'a> {
     /// Notifies of an error. The message doesn't actually need to be of type
     /// String, but I think it does when this eventually uses conditions so it
     /// might as well start using it now.
-    fn err_with_note<
-        S1: Into<string::String>,
-        S2: Into<string::String>,
-        S3: Into<string::String>,
-    >(
+    fn err_with_note<S1: Into<String>, S2: Into<String>, S3: Into<String>>(
         &mut self,
         description: S1,
         label: S2,
@ -968,7 +961,7 @@ impl<'a> Parser<'a> {
 /// in order to properly synthesise the intra-string `Span`s for error diagnostics.
 fn find_width_map_from_snippet(
     input: &str,
-    snippet: Option<string::String>,
+    snippet: Option<String>,
     str_style: Option<usize>,
 ) -> InputStringKind {
     let snippet = match snippet {
@ -1083,8 +1076,8 @@ fn find_width_map_from_snippet(
     InputStringKind::Literal { width_mappings }
 }

-fn unescape_string(string: &str) -> Option<string::String> {
-    let mut buf = string::String::new();
+fn unescape_string(string: &str) -> Option<String> {
+    let mut buf = String::new();
     let mut ok = true;
     unescape::unescape_unicode(string, unescape::Mode::Str, &mut |_, unescaped_char| {
         match unescaped_char {
@ -1,3 +1,5 @@
+use Piece::*;
+
 use super::*;

 #[track_caller]
@ -32,12 +34,12 @@ fn musterr(s: &str) {

 #[test]
 fn simple() {
-    same("asdf", &[String("asdf")]);
-    same("a{{b", &[String("a"), String("{b")]);
-    same("a}}b", &[String("a"), String("}b")]);
-    same("a}}", &[String("a"), String("}")]);
-    same("}}", &[String("}")]);
-    same("\\}}", &[String("\\"), String("}")]);
+    same("asdf", &[Lit("asdf")]);
+    same("a{{b", &[Lit("a"), Lit("{b")]);
+    same("a}}b", &[Lit("a"), Lit("}b")]);
+    same("a}}", &[Lit("a"), Lit("}")]);
+    same("}}", &[Lit("}")]);
+    same("\\}}", &[Lit("\\"), Lit("}")]);
 }

 #[test]
@ -370,7 +372,7 @@ fn format_flags() {
 #[test]
 fn format_mixture() {
     same("abcd {3:x} efg", &[
-        String("abcd "),
+        Lit("abcd "),
         NextArgument(Box::new(Argument {
             position: ArgumentIs(3),
             position_span: InnerSpan { start: 7, end: 8 },
|
||||||
ty_span: None,
|
ty_span: None,
|
||||||
},
|
},
|
||||||
})),
|
})),
|
||||||
String(" efg"),
|
Lit(" efg"),
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
#[test]
|
#[test]
|
||||||
|
|
|
@ -170,9 +170,12 @@ impl<'a, 'ra, 'tcx> visit::Visitor<'a> for DefCollector<'a, 'ra, 'tcx> {

     fn visit_fn(&mut self, fn_kind: FnKind<'a>, span: Span, _: NodeId) {
         match fn_kind {
-            FnKind::Fn(_ctxt, _ident, FnSig { header, decl, span: _ }, _vis, generics, body)
-                if let Some(coroutine_kind) = header.coroutine_kind =>
-            {
+            FnKind::Fn(
+                _ctxt,
+                _ident,
+                _vis,
+                Fn { sig: FnSig { header, decl, span: _ }, generics, body, .. },
+            ) if let Some(coroutine_kind) = header.coroutine_kind => {
                 self.visit_fn_header(header);
                 self.visit_generics(generics);

@ -986,8 +986,8 @@ impl<'ra: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'r
         match fn_kind {
             // Bail if the function is foreign, and thus cannot validly have
             // a body, or if there's no body for some other reason.
-            FnKind::Fn(FnCtxt::Foreign, _, sig, _, generics, _)
-            | FnKind::Fn(_, _, sig, _, generics, None) => {
+            FnKind::Fn(FnCtxt::Foreign, _, _, Fn { sig, generics, .. })
+            | FnKind::Fn(_, _, _, Fn { sig, generics, body: None, .. }) => {
                 self.visit_fn_header(&sig.header);
                 self.visit_generics(generics);
                 self.with_lifetime_rib(
@ -1019,7 +1019,7 @@ impl<'ra: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'r
                 // Create a label rib for the function.
                 this.with_label_rib(RibKind::FnOrCoroutine, |this| {
                     match fn_kind {
-                        FnKind::Fn(_, _, sig, _, generics, body) => {
+                        FnKind::Fn(_, _, _, Fn { sig, generics, body, .. }) => {
                             this.visit_generics(generics);

                             let declaration = &sig.decl;
@ -224,7 +224,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
         let suggestion = if self.current_trait_ref.is_none()
             && let Some((fn_kind, _)) = self.diag_metadata.current_function
             && let Some(FnCtxt::Assoc(_)) = fn_kind.ctxt()
-            && let FnKind::Fn(_, _, sig, ..) = fn_kind
+            && let FnKind::Fn(_, _, _, ast::Fn { sig, .. }) = fn_kind
             && let Some(items) = self.diag_metadata.current_impl_items
             && let Some(item) = items.iter().find(|i| {
                 i.ident.name == item_str.name
@ -560,7 +560,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
                 Applicability::MaybeIncorrect,
             );
             if !self.self_value_is_available(path[0].ident.span) {
-                if let Some((FnKind::Fn(_, _, sig, ..), fn_span)) =
+                if let Some((FnKind::Fn(_, _, _, ast::Fn { sig, .. }), fn_span)) =
                     &self.diag_metadata.current_function
                 {
                     let (span, sugg) = if let Some(param) = sig.decl.inputs.get(0) {
@ -3249,7 +3249,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
         {
             let pre = if lt.kind == MissingLifetimeKind::Ampersand
                 && let Some((kind, _span)) = self.diag_metadata.current_function
-                && let FnKind::Fn(_, _, sig, _, _, _) = kind
+                && let FnKind::Fn(_, _, _, ast::Fn { sig, .. }) = kind
                 && !sig.decl.inputs.is_empty()
                 && let sugg = sig
                     .decl
@ -3290,7 +3290,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
             } else if (lt.kind == MissingLifetimeKind::Ampersand
                 || lt.kind == MissingLifetimeKind::Underscore)
                 && let Some((kind, _span)) = self.diag_metadata.current_function
-                && let FnKind::Fn(_, _, sig, _, _, _) = kind
+                && let FnKind::Fn(_, _, _, ast::Fn { sig, .. }) = kind
                 && let ast::FnRetTy::Ty(ret_ty) = &sig.decl.output
                 && !sig.decl.inputs.is_empty()
                 && let arg_refs = sig
@ -3350,7 +3350,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
         let mut owned_sugg = lt.kind == MissingLifetimeKind::Ampersand;
         let mut sugg = vec![(lt.span, String::new())];
         if let Some((kind, _span)) = self.diag_metadata.current_function
-            && let FnKind::Fn(_, _, sig, _, _, _) = kind
+            && let FnKind::Fn(_, _, _, ast::Fn { sig, .. }) = kind
             && let ast::FnRetTy::Ty(ty) = &sig.decl.output
         {
             let mut lt_finder =
@ -108,21 +108,19 @@ impl Stability {
 // per-function level, since we would then allow safe calls from functions with `+soft-float` to
 // functions without that feature!
 //
-// It is important for soundness that features allowed here do *not* change the function call ABI.
-// For example, disabling the `x87` feature on x86 changes how scalar floats are passed as
-// arguments, so enabling toggling that feature would be unsound. In fact, since `-Ctarget-feature`
-// will just allow unknown features (with a warning), we have to explicitly list features that change
-// the ABI as `Forbidden` to ensure using them causes an error. Note that this is only effective if
-// such features can never be toggled via `-Ctarget-cpu`! If that is ever a possibility, we will need
-// extra checks ensuring that the LLVM-computed target features for a CPU did not (un)set a
-// `Forbidden` feature. See https://github.com/rust-lang/rust/issues/116344 for some more context.
-// FIXME: add such "forbidden" features for non-x86 targets.
+// It is important for soundness to consider the interaction of targets features and the function
+// call ABI. For example, disabling the `x87` feature on x86 changes how scalar floats are passed as
+// arguments, so letting people toggle that feature would be unsound. To this end, the
+// `abi_required_features` function computes which target features must and must not be enabled for
+// any given target, and individual features can also be marked as `Forbidden`.
+// See https://github.com/rust-lang/rust/issues/116344 for some more context.
 //
 // The one exception to features that change the ABI is features that enable larger vector
-// registers. Those are permitted to be listed here. This is currently unsound (see
-// https://github.com/rust-lang/rust/issues/116558); in the future we will have to ensure that
-// functions can only use such vectors as arguments/return types if the corresponding target feature
-// is enabled.
+// registers. Those are permitted to be listed here. The `*_FOR_CORRECT_VECTOR_ABI` arrays store
+// information about which target feature is ABI-required for which vector size; this is used to
+// ensure that vectors can only be passed via `extern "C"` when the right feature is enabled. (For
+// the "Rust" ABI we generally pass vectors by-ref exactly to avoid these issues.)
+// Also see https://github.com/rust-lang/rust/issues/116558.
 //
 // Stabilizing a target feature requires t-lang approval.
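With this change, disabling a feature that the target ABI requires is reported as the feature
having to be enabled, rather than as it being impossible to disable. For example, building with
`-C target-feature=-neon` on an AArch64 target now produces the diagnostic shown in the updated
test expectations further down in this diff:

    warning: target feature `neon` must be enabled to ensure that the ABI of the current target can be implemented correctly
      |
      = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
      = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>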
@ -196,7 +196,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         let item_def_id = self.tcx.associated_item_def_ids(future_trait)[0];

         self.tcx
-            .explicit_item_super_predicates(def_id)
+            .explicit_item_self_bounds(def_id)
             .iter_instantiated_copied(self.tcx, args)
             .find_map(|(predicate, _)| {
                 predicate
@ -293,7 +293,7 @@ impl<T> Trait<T> for X {
             (ty::Dynamic(t, _, ty::DynKind::Dyn), ty::Alias(ty::Opaque, alias))
                 if let Some(def_id) = t.principal_def_id()
                     && tcx
-                        .explicit_item_super_predicates(alias.def_id)
+                        .explicit_item_self_bounds(alias.def_id)
                         .skip_binder()
                         .iter()
                         .any(|(pred, _span)| match pred.kind().skip_binder() {

@ -422,7 +422,7 @@ impl<T> Trait<T> for X {
             ty::Alias(..) => values.expected,
             _ => values.found,
         };
-        let preds = tcx.explicit_item_super_predicates(opaque_ty.def_id);
+        let preds = tcx.explicit_item_self_bounds(opaque_ty.def_id);
         for (pred, _span) in preds.skip_binder() {
             let ty::ClauseKind::Trait(trait_predicate) = pred.kind().skip_binder()
             else {
@ -799,7 +799,7 @@ impl<'tcx> OnUnimplementedFormatString {
         let mut result = Ok(());
         for token in &mut parser {
             match token {
-                Piece::String(_) => (), // Normal string, no need to check it
+                Piece::Lit(_) => (), // Normal string, no need to check it
                 Piece::NextArgument(a) => {
                     let format_spec = a.format;
                     if self.is_diagnostic_namespace_variant
@ -950,7 +950,7 @@ impl<'tcx> OnUnimplementedFormatString {
         let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
         let constructed_message = (&mut parser)
             .map(|p| match p {
-                Piece::String(s) => s.to_owned(),
+                Piece::Lit(s) => s.to_owned(),
                 Piece::NextArgument(a) => match a.position {
                     Position::ArgumentNamed(arg) => {
                         let s = Symbol::intern(arg);
@ -1087,12 +1087,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                     sig_parts.map_bound(|sig| sig.tupled_inputs_ty.tuple_fields().as_slice()),
                 ))
             }
-            ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => self
-                .tcx
-                .item_super_predicates(def_id)
-                .instantiate(self.tcx, args)
-                .iter()
-                .find_map(|pred| {
+            ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
+                self.tcx.item_self_bounds(def_id).instantiate(self.tcx, args).iter().find_map(
+                    |pred| {
                         if let ty::ClauseKind::Projection(proj) = pred.kind().skip_binder()
                             && self
                                 .tcx
@ -1108,7 +1105,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                         } else {
                             None
                         }
-                }),
+                    },
+                )
+            }
             ty::Dynamic(data, _, ty::Dyn) => data.iter().find_map(|pred| {
                 if let ty::ExistentialPredicate::Projection(proj) = pred.skip_binder()
                     && self.tcx.is_lang_item(proj.def_id, LangItem::FnOnceOutput)
@ -1620,9 +1620,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         // projections, we will never be able to equate, e.g. `<T as Tr>::A`
         // with `<<T as Tr>::A as Tr>::A`.
         let relevant_bounds = if in_parent_alias_type {
-            self.tcx().item_non_self_assumptions(alias_ty.def_id)
+            self.tcx().item_non_self_bounds(alias_ty.def_id)
         } else {
-            self.tcx().item_super_predicates(alias_ty.def_id)
+            self.tcx().item_self_bounds(alias_ty.def_id)
         };

         for bound in relevant_bounds.instantiate(self.tcx(), alias_ty.args) {
@ -203,6 +203,16 @@ pub trait Interner:
         def_id: Self::DefId,
     ) -> ty::EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>>;

+    fn item_self_bounds(
+        self,
+        def_id: Self::DefId,
+    ) -> ty::EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>>;
+
+    fn item_non_self_bounds(
+        self,
+        def_id: Self::DefId,
+    ) -> ty::EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>>;
+
     fn predicates_of(
         self,
         def_id: Self::DefId,
@ -313,6 +313,17 @@ pub macro cfg_match {
 /// }
 /// }
 /// ```
+///
+/// If desired, it is possible to return expressions through the use of surrounding braces:
+///
+/// ```
+/// #![feature(cfg_match)]
+///
+/// let _some_string = cfg_match! {{
+///     unix => { "With great power comes great electricity bills" }
+///     _ => { "Behind every successful diet is an unwatched pizza" }
+/// }};
+/// ```
 #[cfg(not(bootstrap))]
 #[unstable(feature = "cfg_match", issue = "115585")]
 #[rustc_diagnostic_item = "cfg_match"]
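The documentation added above also covers using `cfg_match!` in expression position. A minimal
sketch of that form, mirroring the doctest and assuming the unstable `cfg_match` feature with the
macro in scope as in that doctest (the function and variable names are illustrative only):

    #![feature(cfg_match)]

    fn main() {
        // The doubled braces make the macro yield an expression.
        let greeting = cfg_match! {{
            unix => { "hello from a unix-like target" }
            _ => { "hello from some other target" }
        }};
        println!("{greeting}");
    }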
@ -1 +1 @@
-Subproject commit 82a4a49789bc96db1a1b2a210b4c5ed7c9ef0c0d
+Subproject commit fa312a343fbff01bc6cef393e326817f70719813
@ -1 +1 @@
-Subproject commit d56e0f3a0656b7702ca466d4b191e16c28262b82
+Subproject commit 4ed5a1a4a2a7ecc2e529a5baaef04f7bc7917eda
@ -1 +1 @@
-Subproject commit 625b200e5b33a5af35589db0bc454203a3d46d20
+Subproject commit bc2298865544695c63454fc1f9f98a3dc22e9948
@ -1 +1 @@
-Subproject commit 293af991003772bdccf2d6b980182d84dd055942
+Subproject commit 93b921c7d3213d38d920f7f905a3bec093d2217d
@ -79,7 +79,7 @@ impl<'tcx> LateLintPass<'tcx> for FutureNotSend {
             && let Some(future_trait) = cx.tcx.lang_items().future_trait()
             && let Some(send_trait) = cx.tcx.get_diagnostic_item(sym::Send)
         {
-            let preds = cx.tcx.explicit_item_super_predicates(def_id);
+            let preds = cx.tcx.explicit_item_self_bounds(def_id);
             let is_future = preds.iter_instantiated_copied(cx.tcx, args).any(|(p, _)| {
                 p.as_trait_clause()
                     .is_some_and(|trait_pred| trait_pred.skip_binder().trait_ref.def_id == future_trait)
@ -1,5 +1,5 @@
 use rustc_ast::visit::FnKind;
-use rustc_ast::{NodeId, WherePredicateKind};
+use rustc_ast::{Fn, NodeId, WherePredicateKind};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_lint::{EarlyContext, EarlyLintPass};
 use rustc_session::declare_lint_pass;
@ -39,7 +39,7 @@ declare_lint_pass!(MultipleBoundLocations => [MULTIPLE_BOUND_LOCATIONS]);

 impl EarlyLintPass for MultipleBoundLocations {
     fn check_fn(&mut self, cx: &EarlyContext<'_>, kind: FnKind<'_>, _: Span, _: NodeId) {
-        if let FnKind::Fn(_, _, _, _, generics, _) = kind
+        if let FnKind::Fn(_, _, _, Fn { generics, .. }) = kind
             && !generics.params.is_empty()
             && !generics.where_clause.predicates.is_empty()
         {
@ -96,7 +96,7 @@ pub fn contains_ty_adt_constructor_opaque<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'
                     return false;
                 }

-                for (predicate, _span) in cx.tcx.explicit_item_super_predicates(def_id).iter_identity_copied() {
+                for (predicate, _span) in cx.tcx.explicit_item_self_bounds(def_id).iter_identity_copied() {
                     match predicate.kind().skip_binder() {
                         // For `impl Trait<U>`, it will register a predicate of `T: Trait<U>`, so we go through
                         // and check substitutions to find `U`.
@ -322,7 +322,7 @@ pub fn is_must_use_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
         },
         ty::Tuple(args) => args.iter().any(|ty| is_must_use_ty(cx, ty)),
         ty::Alias(ty::Opaque, AliasTy { def_id, .. }) => {
-            for (predicate, _) in cx.tcx.explicit_item_super_predicates(def_id).skip_binder() {
+            for (predicate, _) in cx.tcx.explicit_item_self_bounds(def_id).skip_binder() {
                 if let ty::ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder() {
                     if cx.tcx.has_attr(trait_predicate.trait_ref.def_id, sym::must_use) {
                         return true;
@ -712,7 +712,7 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'t
         ty::Alias(ty::Opaque, AliasTy { def_id, args, .. }) => sig_from_bounds(
             cx,
             ty,
-            cx.tcx.item_super_predicates(def_id).iter_instantiated(cx.tcx, args),
+            cx.tcx.item_self_bounds(def_id).iter_instantiated(cx.tcx, args),
             cx.tcx.opt_parent(def_id),
         ),
         ty::FnPtr(sig_tys, hdr) => Some(ExprFnSig::Sig(sig_tys.with(hdr), None)),
@ -1,5 +1,5 @@
-// We're testing x86 target specific features
-//@only-target: x86_64 i686
+// We're testing x86-32 target specific features. SSE always exists on x86-64.
+//@only-target: i686
 //@compile-flags: -C target-feature=-sse2

 #[cfg(target_arch = "x86")]
@ -229,7 +229,7 @@ impl ExprCollector<'_> {
         };
         for piece in unverified_pieces {
             match piece {
-                rustc_parse_format::Piece::String(_) => {}
+                rustc_parse_format::Piece::Lit(_) => {}
                 rustc_parse_format::Piece::NextArgument(arg) => {
                     // let span = arg_spans.next();

@ -287,7 +287,7 @@ pub(crate) fn parse(

     for piece in pieces {
         match piece {
-            parse::Piece::String(s) => {
+            parse::Piece::Lit(s) => {
                 unfinished_literal.push_str(s);
             }
             parse::Piece::NextArgument(arg) => {
@ -333,19 +333,19 @@ impl<'a> FnSig<'a> {
         defaultness: ast::Defaultness,
     ) -> FnSig<'a> {
         match *fn_kind {
-            visit::FnKind::Fn(visit::FnCtxt::Assoc(..), _, fn_sig, vis, generics, _) => {
-                let mut fn_sig = FnSig::from_method_sig(fn_sig, generics, vis);
+            visit::FnKind::Fn(visit::FnCtxt::Assoc(..), _, vis, ast::Fn { sig, generics, .. }) => {
+                let mut fn_sig = FnSig::from_method_sig(sig, generics, vis);
                 fn_sig.defaultness = defaultness;
                 fn_sig
             }
-            visit::FnKind::Fn(_, _, fn_sig, vis, generics, _) => FnSig {
+            visit::FnKind::Fn(_, _, vis, ast::Fn { sig, generics, .. }) => FnSig {
                 decl,
                 generics,
-                ext: fn_sig.header.ext,
-                constness: fn_sig.header.constness,
-                coroutine_kind: Cow::Borrowed(&fn_sig.header.coroutine_kind),
+                ext: sig.header.ext,
+                constness: sig.header.constness,
+                coroutine_kind: Cow::Borrowed(&sig.header.coroutine_kind),
                 defaultness,
-                safety: fn_sig.header.safety,
+                safety: sig.header.safety,
                 visibility: vis,
             },
             _ => unreachable!(),
@ -3453,6 +3453,7 @@ impl Rewrite for ast::ForeignItem {
             ref sig,
             ref generics,
             ref body,
+            ..
         } = **fn_kind;
         if body.is_some() {
             let mut visitor = FmtVisitor::from_context(context);
@ -3461,7 +3462,7 @@ impl Rewrite for ast::ForeignItem {
             let inner_attrs = inner_attributes(&self.attrs);
             let fn_ctxt = visit::FnCtxt::Foreign;
             visitor.visit_fn(
-                visit::FnKind::Fn(fn_ctxt, &self.ident, sig, &self.vis, generics, body),
+                visit::FnKind::Fn(fn_ctxt, &self.ident, &self.vis, fn_kind),
                 &sig.decl,
                 self.span,
                 defaultness,
@ -386,7 +386,14 @@ impl<'b, 'a: 'b> FmtVisitor<'a> {
         let indent = self.block_indent;
         let block;
         let rewrite = match fk {
-            visit::FnKind::Fn(_, ident, _, _, _, Some(ref b)) => {
+            visit::FnKind::Fn(
+                _,
+                ident,
+                _,
+                ast::Fn {
+                    body: Some(ref b), ..
+                },
+            ) => {
                 block = b;
                 self.rewrite_fn_before_block(
                     indent,

@ -539,6 +546,7 @@ impl<'b, 'a: 'b> FmtVisitor<'a> {
             ref sig,
             ref generics,
             ref body,
+            ..
         } = **fn_kind;
         if body.is_some() {
             let inner_attrs = inner_attributes(&item.attrs);
@ -547,7 +555,7 @@ impl<'b, 'a: 'b> FmtVisitor<'a> {
                 _ => visit::FnCtxt::Foreign,
             };
             self.visit_fn(
-                visit::FnKind::Fn(fn_ctxt, &item.ident, sig, &item.vis, generics, body),
+                visit::FnKind::Fn(fn_ctxt, &item.ident, &item.vis, fn_kind),
                 &sig.decl,
                 item.span,
                 defaultness,
@ -640,12 +648,13 @@ impl<'b, 'a: 'b> FmtVisitor<'a> {
             ref sig,
             ref generics,
             ref body,
+            ..
         } = **fn_kind;
         if body.is_some() {
             let inner_attrs = inner_attributes(&ai.attrs);
             let fn_ctxt = visit::FnCtxt::Assoc(assoc_ctxt);
             self.visit_fn(
-                visit::FnKind::Fn(fn_ctxt, &ai.ident, sig, &ai.vis, generics, body),
+                visit::FnKind::Fn(fn_ctxt, &ai.ident, &ai.vis, fn_kind),
                 &sig.decl,
                 ai.span,
                 defaultness,
@ -39,8 +39,8 @@ pub unsafe fn banana() -> u32 {
 }

 // CHECK: attributes [[APPLEATTRS]]
-// COMPAT-SAME: "target-features"="+x87,+sse2,+avx,+avx2,{{.*}}"
-// INCOMPAT-SAME: "target-features"="+x87,+sse2,-avx2,-avx,+avx,{{.*}}"
+// COMPAT-SAME: "target-features"="+avx,+avx2,{{.*}}"
+// INCOMPAT-SAME: "target-features"="-avx2,-avx,+avx,{{.*}}"
 // CHECK: attributes [[BANANAATTRS]]
-// COMPAT-SAME: "target-features"="+x87,+sse2,+avx,+avx2,{{.*}}"
-// INCOMPAT-SAME: "target-features"="+x87,+sse2,-avx2,-avx"
+// COMPAT-SAME: "target-features"="+avx,+avx2,{{.*}}"
+// INCOMPAT-SAME: "target-features"="-avx2,-avx"
@ -11,11 +11,10 @@
 // ENABLE_SVE: attributes #0 = { {{.*}} "target-features"="{{((\+outline-atomics,?)|(\+v8a,?)|(\+fpmr,?)?|(\+sve,?)|(\+neon,?)|(\+fp-armv8,?))*}}" }

 //@ [DISABLE_SVE] compile-flags: -C target-feature=-sve -Copt-level=0
-// DISABLE_SVE: attributes #0 = { {{.*}} "target-features"="{{((\+outline-atomics,?)|(\+v8a,?)|(\+fpmr,?)?|(-sve,?)|(\+neon,?)|(\+fp-armv8,?))*}}" }
+// DISABLE_SVE: attributes #0 = { {{.*}} "target-features"="{{((\+outline-atomics,?)|(\+v8a,?)|(\+fpmr,?)?|(-sve,?)|(\+neon,?))*}}" }

 //@ [DISABLE_NEON] compile-flags: -C target-feature=-neon -Copt-level=0
-// DISABLE_NEON: attributes #0 = { {{.*}} "target-features"="{{((\+outline-atomics,?)|(\+v8a,?)|(\+fpmr,?)?|(-fp-armv8,?)|(-neon,?))*}}" }
+// `neon` and `fp-armv8` get enabled as target base features, but then disabled again at the end of the list.
+// DISABLE_NEON: attributes #0 = { {{.*}} "target-features"="{{((\+outline-atomics,?)|(\+v8a,?)|(\+fp-armv8,?)|(\+neon,?))*}},-neon,-fp-armv8{{(,\+fpmr)?}}" }

 //@ [ENABLE_NEON] compile-flags: -C target-feature=+neon -Copt-level=0
 // ENABLE_NEON: attributes #0 = { {{.*}} "target-features"="{{((\+outline-atomics,?)|(\+v8a,?)|(\+fpmr,?)?|(\+fp-armv8,?)|(\+neon,?))*}}" }
@ -6,6 +6,10 @@
 //@ normalize-stdout: "libthe_backend.dylib" -> "libthe_backend.so"
 //@ normalize-stdout: "the_backend.dll" -> "libthe_backend.so"

+// Pick a target that requires no target features, so that no warning is shown
+// about missing target features.
+//@ compile-flags: --target arm-unknown-linux-gnueabi
+//@ needs-llvm-components: arm
 //@ revisions: normal dep bindep
 //@ compile-flags: --crate-type=lib
 //@ [normal] compile-flags: --emit=link=-
@ -1,5 +1,5 @@
 error[E0382]: use of moved value: `x`
-  --> $DIR/cant-see-copy-bound-from-child-rigid.rs:14:9
+  --> $DIR/cant-see-copy-bound-from-child-rigid.rs:18:9
    |
 LL | fn foo<T: Trait>(x: T::Assoc) -> (T::Assoc, T::Assoc)
    |                  - move occurs because `x` has type `<T as Trait>::Assoc`, which does not implement the `Copy` trait
@ -0,0 +1,14 @@
+error[E0382]: use of moved value: `x`
+  --> $DIR/cant-see-copy-bound-from-child-rigid.rs:18:9
+   |
+LL | fn foo<T: Trait>(x: T::Assoc) -> (T::Assoc, T::Assoc)
+   |                  - move occurs because `x` has type `<T as Trait>::Assoc`, which does not implement the `Copy` trait
+...
+LL |     (x, x)
+   |      -  ^ value used here after move
+   |      |
+   |      value moved here
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0382`.
@ -1,3 +1,7 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
+
 trait Id {
     type This: ?Sized;
 }
@ -1,7 +0,0 @@
-warning: target feature `neon` cannot be disabled with `-Ctarget-feature`: this feature is required by the target ABI
-  |
-  = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
-  = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>
-
-warning: 1 warning emitted
-
@ -1,4 +1,4 @@
-warning: target feature `sse` cannot be disabled with `-Ctarget-feature`: this feature is required by the target ABI
+warning: target feature `sse2` must be enabled to ensure that the ABI of the current target can be implemented correctly
   |
   = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
   = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>
@ -1,4 +1,4 @@
-warning: target feature `neon` cannot be disabled with `-Ctarget-feature`: this feature is required by the target ABI
+warning: target feature `neon` must be enabled to ensure that the ABI of the current target can be implemented correctly
   |
   = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
   = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>
@ -1,11 +1,11 @@
-warning: unstable feature specified for `-Ctarget-feature`: `x87`
+warning: target feature `x87` must be enabled to ensure that the ABI of the current target can be implemented correctly
   |
-  = note: this feature is not stably supported; its behavior can change in the future
-
-warning: target feature `x87` cannot be disabled with `-Ctarget-feature`: this feature is required by the target ABI
-  |
   = note: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
   = note: for more information, see issue #116344 <https://github.com/rust-lang/rust/issues/116344>

+warning: unstable feature specified for `-Ctarget-feature`: `x87`
+  |
+  = note: this feature is not stably supported; its behavior can change in the future
+
 warning: 2 warnings emitted
@ -99,26 +99,6 @@ note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
   --> $SRC_DIR/core/src/cmp.rs:LL:COL
    = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
-
-error: `~const` can only be applied to `#[const_trait]` traits
-  --> $DIR/const-impl-trait.rs:23:22
-   |
-LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
-   |                      ^^^^^^ can't be applied to `PartialEq`
-   |
-note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
-  --> $SRC_DIR/core/src/cmp.rs:LL:COL
-   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
-
-error: `~const` can only be applied to `#[const_trait]` traits
-  --> $DIR/const-impl-trait.rs:23:22
-   |
-LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
-   |                      ^^^^^^ can't be applied to `PartialEq`
-   |
-note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
-  --> $SRC_DIR/core/src/cmp.rs:LL:COL
-   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
 
 error: `~const` can only be applied to `#[const_trait]` traits
   --> $DIR/const-impl-trait.rs:27:22
    |

@ -149,6 +129,36 @@ note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
   --> $SRC_DIR/core/src/cmp.rs:LL:COL
    = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
 
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
 error[E0015]: cannot call non-const operator in constants
   --> $DIR/const-impl-trait.rs:35:13
    |

@ -181,7 +191,7 @@ LL | a == a
    |
    = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants
 
-error: aborting due to 20 previous errors
+error: aborting due to 21 previous errors
 
 Some errors have detailed explanations: E0015, E0635.
 For more information about an error, try `rustc --explain E0015`.