commit feed376e15
54 changed files with 442 additions and 428 deletions

@@ -2580,9 +2580,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
 dependencies = [
  "unicode-ident",
 ]
@@ -125,15 +125,9 @@ pub struct Borrows<'a, 'tcx> {
     borrows_out_of_scope_at_location: FxIndexMap<Location, Vec<BorrowIndex>>,
 }
 
-struct StackEntry {
-    bb: mir::BasicBlock,
-    lo: usize,
-    hi: usize,
-}
-
 struct OutOfScopePrecomputer<'a, 'tcx> {
     visited: BitSet<mir::BasicBlock>,
-    visit_stack: Vec<StackEntry>,
+    visit_stack: Vec<mir::BasicBlock>,
     body: &'a Body<'tcx>,
     regioncx: &'a RegionInferenceContext<'tcx>,
     borrows_out_of_scope_at_location: FxIndexMap<Location, Vec<BorrowIndex>>,
@@ -158,29 +152,50 @@ impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
         borrow_region: RegionVid,
         first_location: Location,
     ) {
-        // We visit one BB at a time. The complication is that we may start in the
-        // middle of the first BB visited (the one containing `first_location`), in which
-        // case we may have to later on process the first part of that BB if there
-        // is a path back to its start.
-
-        // For visited BBs, we record the index of the first statement processed.
-        // (In fully processed BBs this index is 0.) Note also that we add BBs to
-        // `visited` once they are added to `stack`, before they are actually
-        // processed, because this avoids the need to look them up again on
-        // completion.
-        self.visited.insert(first_location.block);
-
         let first_block = first_location.block;
-        let mut first_lo = first_location.statement_index;
-        let first_hi = self.body[first_block].statements.len();
+        let first_bb_data = &self.body.basic_blocks[first_block];
 
-        self.visit_stack.push(StackEntry { bb: first_block, lo: first_lo, hi: first_hi });
+        // This is the first block, we only want to visit it from the creation of the borrow at
+        // `first_location`.
+        let first_lo = first_location.statement_index;
+        let first_hi = first_bb_data.statements.len();
 
-        'preorder: while let Some(StackEntry { bb, lo, hi }) = self.visit_stack.pop() {
+        if let Some(kill_stmt) = self.regioncx.first_non_contained_inclusive(
+            borrow_region,
+            first_block,
+            first_lo,
+            first_hi,
+        ) {
+            let kill_location = Location { block: first_block, statement_index: kill_stmt };
+            // If region does not contain a point at the location, then add to list and skip
+            // successor locations.
+            debug!("borrow {:?} gets killed at {:?}", borrow_index, kill_location);
+            self.borrows_out_of_scope_at_location
+                .entry(kill_location)
+                .or_default()
+                .push(borrow_index);
+
+            // The borrow is already dead, there is no need to visit other blocks.
+            return;
+        }
+
+        // The borrow is not dead. Add successor BBs to the work list, if necessary.
+        for succ_bb in first_bb_data.terminator().successors() {
+            if self.visited.insert(succ_bb) {
+                self.visit_stack.push(succ_bb);
+            }
+        }
+
+        // We may end up visiting `first_block` again. This is not an issue: we know at this point
+        // that it does not kill the borrow in the `first_lo..=first_hi` range, so checking the
+        // `0..first_lo` range and the `0..first_hi` range give the same result.
+        while let Some(block) = self.visit_stack.pop() {
+            let bb_data = &self.body[block];
+            let num_stmts = bb_data.statements.len();
             if let Some(kill_stmt) =
-                self.regioncx.first_non_contained_inclusive(borrow_region, bb, lo, hi)
+                self.regioncx.first_non_contained_inclusive(borrow_region, block, 0, num_stmts)
             {
-                let kill_location = Location { block: bb, statement_index: kill_stmt };
+                let kill_location = Location { block, statement_index: kill_stmt };
                 // If region does not contain a point at the location, then add to list and skip
                 // successor locations.
                 debug!("borrow {:?} gets killed at {:?}", borrow_index, kill_location);
@@ -188,38 +203,15 @@ impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
                     .entry(kill_location)
                     .or_default()
                     .push(borrow_index);
-                continue 'preorder;
-            }
 
-            // If we process the first part of the first basic block (i.e. we encounter that block
-            // for the second time), we no longer have to visit its successors again.
-            if bb == first_block && hi != first_hi {
+                // We killed the borrow, so we do not visit this block's successors.
                 continue;
             }
 
             // Add successor BBs to the work list, if necessary.
-            let bb_data = &self.body[bb];
-            debug_assert!(hi == bb_data.statements.len());
             for succ_bb in bb_data.terminator().successors() {
-                if !self.visited.insert(succ_bb) {
-                    if succ_bb == first_block && first_lo > 0 {
-                        // `succ_bb` has been seen before. If it wasn't
-                        // fully processed, add its first part to `stack`
-                        // for processing.
-                        self.visit_stack.push(StackEntry { bb: succ_bb, lo: 0, hi: first_lo - 1 });
-
-                        // And update this entry with 0, to represent the
-                        // whole BB being processed.
-                        first_lo = 0;
-                    }
-                } else {
-                    // succ_bb hasn't been seen before. Add it to
-                    // `stack` for processing.
-                    self.visit_stack.push(StackEntry {
-                        bb: succ_bb,
-                        lo: 0,
-                        hi: self.body[succ_bb].statements.len(),
-                    });
+                if self.visited.insert(succ_bb) {
+                    self.visit_stack.push(succ_bb);
                 }
             }
         }
@@ -2,7 +2,7 @@ use crate::base::ExtCtxt;
 use pm::bridge::{
     server, DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree,
 };
-use pm::{Delimiter, Level, LineColumn};
+use pm::{Delimiter, Level};
 use rustc_ast as ast;
 use rustc_ast::token;
 use rustc_ast::tokenstream::{self, Spacing::*, TokenStream};
@@ -648,25 +648,24 @@ impl server::Span for Rustc<'_, '_> {
 
         Range { start: relative_start_pos.0 as usize, end: relative_end_pos.0 as usize }
     }
-    fn start(&mut self, span: Self::Span) -> LineColumn {
-        let loc = self.sess().source_map().lookup_char_pos(span.lo());
-        LineColumn { line: loc.line, column: loc.col.to_usize() }
-    }
-
-    fn end(&mut self, span: Self::Span) -> LineColumn {
-        let loc = self.sess().source_map().lookup_char_pos(span.hi());
-        LineColumn { line: loc.line, column: loc.col.to_usize() }
-    }
-
-    fn before(&mut self, span: Self::Span) -> Self::Span {
+
+    fn start(&mut self, span: Self::Span) -> Self::Span {
         span.shrink_to_lo()
     }
 
-    fn after(&mut self, span: Self::Span) -> Self::Span {
+    fn end(&mut self, span: Self::Span) -> Self::Span {
         span.shrink_to_hi()
     }
 
+    fn line(&mut self, span: Self::Span) -> usize {
+        let loc = self.sess().source_map().lookup_char_pos(span.lo());
+        loc.line
+    }
+
+    fn column(&mut self, span: Self::Span) -> usize {
+        let loc = self.sess().source_map().lookup_char_pos(span.lo());
+        loc.col.to_usize() + 1
+    }
+
     fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
         let self_loc = self.sess().source_map().lookup_char_pos(first.lo());
         let other_loc = self.sess().source_map().lookup_char_pos(second.lo());
@@ -9,12 +9,12 @@ use rustc_span::symbol::Ident;
 use rustc_span::{ErrorGuaranteed, Span};
 use rustc_trait_selection::traits;
 
-use crate::astconv::{AstConv, ConvertedBinding, ConvertedBindingKind};
+use crate::astconv::{
+    AstConv, ConvertedBinding, ConvertedBindingKind, OnlySelfBounds, PredicateFilter,
+};
 use crate::bounds::Bounds;
 use crate::errors::{MultipleRelaxedDefaultBounds, ValueOfAssociatedStructAlreadySpecified};
 
-use super::OnlySelfBounds;
-
 impl<'tcx> dyn AstConv<'tcx> + '_ {
     /// Sets `implicitly_sized` to true on `Bounds` if necessary
     pub(crate) fn add_implicitly_sized(
@@ -176,12 +176,36 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
         &self,
         param_ty: Ty<'tcx>,
         ast_bounds: &[hir::GenericBound<'_>],
-        only_self_bounds: OnlySelfBounds,
+        filter: PredicateFilter,
     ) -> Bounds<'tcx> {
         let mut bounds = Bounds::default();
+
+        let only_self_bounds = match filter {
+            PredicateFilter::All | PredicateFilter::SelfAndAssociatedTypeBounds => {
+                OnlySelfBounds(false)
+            }
+            PredicateFilter::SelfOnly | PredicateFilter::SelfThatDefines(_) => OnlySelfBounds(true),
+        };
+
         self.add_bounds(
             param_ty,
-            ast_bounds.iter(),
+            ast_bounds.iter().filter(|bound| {
+                match filter {
+                    PredicateFilter::All
+                    | PredicateFilter::SelfOnly
+                    | PredicateFilter::SelfAndAssociatedTypeBounds => true,
+                    PredicateFilter::SelfThatDefines(assoc_name) => {
+                        if let Some(trait_ref) = bound.trait_ref()
+                            && let Some(trait_did) = trait_ref.trait_def_id()
+                            && self.tcx().trait_may_define_assoc_item(trait_did, assoc_name)
+                        {
+                            true
+                        } else {
+                            false
+                        }
+                    }
+                }
+            }),
             &mut bounds,
             ty::List::empty(),
             only_self_bounds,
@@ -191,38 +215,6 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
         bounds
     }
 
-    /// Convert the bounds in `ast_bounds` that refer to traits which define an associated type
-    /// named `assoc_name` into ty::Bounds. Ignore the rest.
-    pub(crate) fn compute_bounds_that_match_assoc_item(
-        &self,
-        param_ty: Ty<'tcx>,
-        ast_bounds: &[hir::GenericBound<'_>],
-        assoc_name: Ident,
-    ) -> Bounds<'tcx> {
-        let mut result = Vec::new();
-
-        for ast_bound in ast_bounds {
-            if let Some(trait_ref) = ast_bound.trait_ref()
-                && let Some(trait_did) = trait_ref.trait_def_id()
-                && self.tcx().trait_may_define_assoc_item(trait_did, assoc_name)
-            {
-                result.push(ast_bound.clone());
-            }
-        }
-
-        let mut bounds = Bounds::default();
-        self.add_bounds(
-            param_ty,
-            result.iter(),
-            &mut bounds,
-            ty::List::empty(),
-            OnlySelfBounds(true),
-        );
-        debug!(?bounds);
-
-        bounds
-    }
-
     /// Given an HIR binding like `Item = Foo` or `Item: Foo`, pushes the corresponding predicates
     /// onto `bounds`.
     ///
@@ -58,6 +58,24 @@ pub struct PathSeg(pub DefId, pub usize);
 #[derive(Copy, Clone, Debug)]
 pub struct OnlySelfBounds(pub bool);
 
+#[derive(Copy, Clone, Debug)]
+pub enum PredicateFilter {
+    /// All predicates may be implied by the trait.
+    All,
+
+    /// Only traits that reference `Self: ..` are implied by the trait.
+    SelfOnly,
+
+    /// Only traits that reference `Self: ..` and define an associated type
+    /// with the given ident are implied by the trait.
+    SelfThatDefines(Ident),
+
+    /// Only traits that reference `Self: ..` and their associated type bounds.
+    /// For example, given `Self: Tr<A: B>`, this would expand to `Self: Tr`
+    /// and `<Self as Tr>::A: B`.
+    SelfAndAssociatedTypeBounds,
+}
+
 pub trait AstConv<'tcx> {
     fn tcx(&self) -> TyCtxt<'tcx>;
 
@@ -1,5 +1,5 @@
 use super::ItemCtxt;
-use crate::astconv::{AstConv, OnlySelfBounds};
+use crate::astconv::{AstConv, PredicateFilter};
 use rustc_hir as hir;
 use rustc_infer::traits::util;
 use rustc_middle::ty::subst::InternalSubsts;

@@ -26,7 +26,7 @@ fn associated_type_bounds<'tcx>(
     );
 
     let icx = ItemCtxt::new(tcx, assoc_item_def_id);
-    let mut bounds = icx.astconv().compute_bounds(item_ty, ast_bounds, OnlySelfBounds(false));
+    let mut bounds = icx.astconv().compute_bounds(item_ty, ast_bounds, PredicateFilter::All);
     // Associated types are implicitly sized unless a `?Sized` bound is found
     icx.astconv().add_implicitly_sized(&mut bounds, item_ty, ast_bounds, None, span);
 

@@ -68,7 +68,7 @@ fn opaque_type_bounds<'tcx>(
 ) -> &'tcx [(ty::Clause<'tcx>, Span)] {
     ty::print::with_no_queries!({
         let icx = ItemCtxt::new(tcx, opaque_def_id);
-        let mut bounds = icx.astconv().compute_bounds(item_ty, ast_bounds, OnlySelfBounds(false));
+        let mut bounds = icx.astconv().compute_bounds(item_ty, ast_bounds, PredicateFilter::All);
         // Opaque types are implicitly sized unless a `?Sized` bound is found
         icx.astconv().add_implicitly_sized(&mut bounds, item_ty, ast_bounds, None, span);
         debug!(?bounds);
@@ -1,4 +1,4 @@
-use crate::astconv::{AstConv, OnlySelfBounds};
+use crate::astconv::{AstConv, OnlySelfBounds, PredicateFilter};
 use crate::bounds::Bounds;
 use crate::collect::ItemCtxt;
 use crate::constrained_generic_params as cgp;

@@ -125,7 +125,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
     if let Some(self_bounds) = is_trait {
         predicates.extend(
             icx.astconv()
-                .compute_bounds(tcx.types.self_param, self_bounds, OnlySelfBounds(false))
+                .compute_bounds(tcx.types.self_param, self_bounds, PredicateFilter::All)
                 .clauses(),
         );
     }

@@ -530,19 +530,6 @@ pub(super) fn explicit_predicates_of<'tcx>(
     }
 }
 
-#[derive(Copy, Clone, Debug)]
-pub enum PredicateFilter {
-    /// All predicates may be implied by the trait
-    All,
-
-    /// Only traits that reference `Self: ..` are implied by the trait
-    SelfOnly,
-
-    /// Only traits that reference `Self: ..` and define an associated type
-    /// with the given ident are implied by the trait
-    SelfThatDefines(Ident),
-}
-
 /// Ensures that the super-predicates of the trait with a `DefId`
 /// of `trait_def_id` are converted and stored. This also ensures that
 /// the transitive super-predicates are converted.
@@ -564,11 +551,15 @@ pub(super) fn implied_predicates_of(
     tcx: TyCtxt<'_>,
     trait_def_id: LocalDefId,
 ) -> ty::GenericPredicates<'_> {
-    if tcx.is_trait_alias(trait_def_id.to_def_id()) {
-        implied_predicates_with_filter(tcx, trait_def_id.to_def_id(), PredicateFilter::All)
-    } else {
-        tcx.super_predicates_of(trait_def_id)
-    }
+    implied_predicates_with_filter(
+        tcx,
+        trait_def_id.to_def_id(),
+        if tcx.is_trait_alias(trait_def_id.to_def_id()) {
+            PredicateFilter::All
+        } else {
+            PredicateFilter::SelfAndAssociatedTypeBounds
+        },
+    )
 }
 
 /// Ensures that the super-predicates of the trait with a `DefId`
@@ -601,44 +592,14 @@ pub(super) fn implied_predicates_with_filter(
     let icx = ItemCtxt::new(tcx, trait_def_id);
 
     let self_param_ty = tcx.types.self_param;
-    let (superbounds, where_bounds_that_match) = match filter {
-        PredicateFilter::All => (
-            // Convert the bounds that follow the colon (or equal in trait aliases)
-            icx.astconv().compute_bounds(self_param_ty, bounds, OnlySelfBounds(false)),
-            // Also include all where clause bounds
-            icx.type_parameter_bounds_in_generics(
-                generics,
-                item.owner_id.def_id,
-                self_param_ty,
-                OnlySelfBounds(false),
-                None,
-            ),
-        ),
-        PredicateFilter::SelfOnly => (
-            // Convert the bounds that follow the colon (or equal in trait aliases)
-            icx.astconv().compute_bounds(self_param_ty, bounds, OnlySelfBounds(true)),
-            // Include where clause bounds for `Self`
-            icx.type_parameter_bounds_in_generics(
-                generics,
-                item.owner_id.def_id,
-                self_param_ty,
-                OnlySelfBounds(true),
-                None,
-            ),
-        ),
-        PredicateFilter::SelfThatDefines(assoc_name) => (
-            // Convert the bounds that follow the colon (or equal) that reference the associated name
-            icx.astconv().compute_bounds_that_match_assoc_item(self_param_ty, bounds, assoc_name),
-            // Include where clause bounds for `Self` that reference the associated name
-            icx.type_parameter_bounds_in_generics(
-                generics,
-                item.owner_id.def_id,
-                self_param_ty,
-                OnlySelfBounds(true),
-                Some(assoc_name),
-            ),
-        ),
-    };
+    let superbounds = icx.astconv().compute_bounds(self_param_ty, bounds, filter);
+
+    let where_bounds_that_match = icx.type_parameter_bounds_in_generics(
+        generics,
+        item.owner_id.def_id,
+        self_param_ty,
+        filter,
+    );
 
     // Combine the two lists to form the complete set of superbounds:
     let implied_bounds =
@@ -743,8 +704,7 @@ pub(super) fn type_param_predicates(
             ast_generics,
             def_id,
             ty,
-            OnlySelfBounds(true),
-            Some(assoc_name),
+            PredicateFilter::SelfThatDefines(assoc_name),
         )
         .into_iter()
         .filter(|(predicate, _)| match predicate.kind().skip_binder() {

@@ -768,8 +728,7 @@ impl<'tcx> ItemCtxt<'tcx> {
         ast_generics: &'tcx hir::Generics<'tcx>,
         param_def_id: LocalDefId,
         ty: Ty<'tcx>,
-        only_self_bounds: OnlySelfBounds,
-        assoc_name: Option<Ident>,
+        filter: PredicateFilter,
     ) -> Vec<(ty::Clause<'tcx>, Span)> {
         let mut bounds = Bounds::default();
 

@@ -778,9 +737,23 @@ impl<'tcx> ItemCtxt<'tcx> {
                 continue;
             };
 
+            let (only_self_bounds, assoc_name) = match filter {
+                PredicateFilter::All | PredicateFilter::SelfAndAssociatedTypeBounds => {
+                    (OnlySelfBounds(false), None)
+                }
+                PredicateFilter::SelfOnly => (OnlySelfBounds(true), None),
+                PredicateFilter::SelfThatDefines(assoc_name) => {
+                    (OnlySelfBounds(true), Some(assoc_name))
+                }
+            };
+
+            // Subtle: If we're collecting `SelfAndAssociatedTypeBounds`, then we
+            // want to only consider predicates with `Self: ...`, but we don't want
+            // `OnlySelfBounds(true)` since we want to collect the nested associated
+            // type bound as well.
             let bound_ty = if predicate.is_param_bound(param_def_id.to_def_id()) {
                 ty
-            } else if !only_self_bounds.0 {
+            } else if matches!(filter, PredicateFilter::All) {
                 self.to_ty(predicate.bounded_ty)
             } else {
                 continue;
@@ -354,7 +354,12 @@ pub fn try_print_query_stack(handler: &Handler, num_frames: Option<usize>) {
     // state if it was responsible for triggering the panic.
     let i = ty::tls::with_context_opt(|icx| {
         if let Some(icx) = icx {
-            print_query_stack(QueryCtxt::new(icx.tcx), icx.query, handler, num_frames)
+            ty::print::with_no_queries!(print_query_stack(
+                QueryCtxt::new(icx.tcx),
+                icx.query,
+                handler,
+                num_frames
+            ))
         } else {
             0
         }
@@ -660,9 +660,6 @@ pub enum ImplSource<'tcx, N> {
 
     /// ImplSource for trait upcasting coercion
     TraitUpcasting(ImplSourceTraitUpcastingData<N>),
-
-    /// ImplSource for a trait alias.
-    TraitAlias(ImplSourceTraitAliasData<'tcx, N>),
 }
 
 impl<'tcx, N> ImplSource<'tcx, N> {

@@ -671,7 +668,6 @@ impl<'tcx, N> ImplSource<'tcx, N> {
             ImplSource::UserDefined(i) => i.nested,
             ImplSource::Param(n, _) | ImplSource::Builtin(n) => n,
             ImplSource::Object(d) => d.nested,
-            ImplSource::TraitAlias(d) => d.nested,
             ImplSource::TraitUpcasting(d) => d.nested,
         }
     }

@@ -681,7 +677,6 @@ impl<'tcx, N> ImplSource<'tcx, N> {
             ImplSource::UserDefined(i) => &i.nested,
             ImplSource::Param(n, _) | ImplSource::Builtin(n) => &n,
             ImplSource::Object(d) => &d.nested,
-            ImplSource::TraitAlias(d) => &d.nested,
             ImplSource::TraitUpcasting(d) => &d.nested,
         }
     }

@@ -691,7 +686,6 @@ impl<'tcx, N> ImplSource<'tcx, N> {
             ImplSource::UserDefined(i) => &mut i.nested,
             ImplSource::Param(n, _) | ImplSource::Builtin(n) => n,
             ImplSource::Object(d) => &mut d.nested,
-            ImplSource::TraitAlias(d) => &mut d.nested,
             ImplSource::TraitUpcasting(d) => &mut d.nested,
         }
     }

@@ -709,15 +703,9 @@ impl<'tcx, N> ImplSource<'tcx, N> {
             ImplSource::Param(n, ct) => ImplSource::Param(n.into_iter().map(f).collect(), ct),
             ImplSource::Builtin(n) => ImplSource::Builtin(n.into_iter().map(f).collect()),
             ImplSource::Object(o) => ImplSource::Object(ImplSourceObjectData {
-                upcast_trait_def_id: o.upcast_trait_def_id,
                 vtable_base: o.vtable_base,
                 nested: o.nested.into_iter().map(f).collect(),
             }),
-            ImplSource::TraitAlias(d) => ImplSource::TraitAlias(ImplSourceTraitAliasData {
-                alias_def_id: d.alias_def_id,
-                substs: d.substs,
-                nested: d.nested.into_iter().map(f).collect(),
-            }),
             ImplSource::TraitUpcasting(d) => {
                 ImplSource::TraitUpcasting(ImplSourceTraitUpcastingData {
                     vtable_vptr_slot: d.vtable_vptr_slot,

@@ -761,9 +749,6 @@ pub struct ImplSourceTraitUpcastingData<N> {
 #[derive(PartialEq, Eq, Clone, TyEncodable, TyDecodable, HashStable, Lift)]
 #[derive(TypeFoldable, TypeVisitable)]
 pub struct ImplSourceObjectData<N> {
-    /// `Foo` upcast to the obligation trait. This will be some supertrait of `Foo`.
-    pub upcast_trait_def_id: DefId,
-
     /// The vtable is formed by concatenating together the method lists of
     /// the base object trait and all supertraits, pointers to supertrait vtable will
     /// be provided when necessary; this is the start of `upcast_trait_ref`'s methods

@@ -773,14 +758,6 @@ pub struct ImplSourceObjectData<N> {
     pub nested: Vec<N>,
 }
 
-#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, Lift)]
-#[derive(TypeFoldable, TypeVisitable)]
-pub struct ImplSourceTraitAliasData<'tcx, N> {
-    pub alias_def_id: DefId,
-    pub substs: SubstsRef<'tcx>,
-    pub nested: Vec<N>,
-}
-
 #[derive(Clone, Debug, PartialEq, Eq, Hash, HashStable, PartialOrd, Ord)]
 pub enum ObjectSafetyViolation {
     /// `Self: Sized` declared on the trait.
@@ -17,8 +17,6 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSource<'tcx, N> {
                 write!(f, "ImplSourceParamData({:?}, {:?})", n, ct)
             }
 
-            super::ImplSource::TraitAlias(ref d) => write!(f, "{:?}", d),
-
             super::ImplSource::TraitUpcasting(ref d) => write!(f, "{:?}", d),
         }
     }

@@ -48,18 +46,8 @@ impl<N: fmt::Debug> fmt::Debug for traits::ImplSourceObjectData<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
-            "ImplSourceObjectData(upcast={:?}, vtable_base={}, nested={:?})",
-            self.upcast_trait_def_id, self.vtable_base, self.nested
-        )
-    }
-}
-
-impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSourceTraitAliasData<'tcx, N> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            f,
-            "ImplSourceTraitAliasData(alias_def_id={:?}, substs={:?}, nested={:?})",
-            self.alias_def_id, self.substs, self.nested
+            "ImplSourceObjectData(vtable_base={}, nested={:?})",
+            self.vtable_base, self.nested
         )
     }
 }
@@ -94,6 +94,10 @@ macro_rules! define_helper {
                     $tl.with(|c| c.set(self.0))
                 }
             }
+
+            pub fn $name() -> bool {
+                $tl.with(|c| c.get())
+            }
         )+
     }
 }

@@ -676,7 +680,7 @@ pub trait PrettyPrinter<'tcx>:
                 p!(")")
             }
             ty::FnDef(def_id, substs) => {
-                if NO_QUERIES.with(|q| q.get()) {
+                if with_no_queries() {
                     p!(print_def_path(def_id, substs));
                 } else {
                     let sig = self.tcx().fn_sig(def_id).subst(self.tcx(), substs);

@@ -732,7 +736,7 @@ pub trait PrettyPrinter<'tcx>:
                 p!(print_def_path(def_id, &[]));
             }
             ty::Alias(ty::Projection | ty::Inherent | ty::Weak, ref data) => {
-                if !(self.should_print_verbose() || NO_QUERIES.with(|q| q.get()))
+                if !(self.should_print_verbose() || with_no_queries())
                     && self.tcx().is_impl_trait_in_trait(data.def_id)
                 {
                     return self.pretty_print_opaque_impl_type(data.def_id, data.substs);

@@ -779,7 +783,7 @@ pub trait PrettyPrinter<'tcx>:
                     return Ok(self);
                 }
                 _ => {
-                    if NO_QUERIES.with(|q| q.get()) {
+                    if with_no_queries() {
                         p!(print_def_path(def_id, &[]));
                         return Ok(self);
                     } else {

@@ -1746,7 +1750,8 @@ impl DerefMut for FmtPrinter<'_, '_> {
 
 impl<'a, 'tcx> FmtPrinter<'a, 'tcx> {
     pub fn new(tcx: TyCtxt<'tcx>, ns: Namespace) -> Self {
-        Self::new_with_limit(tcx, ns, tcx.type_length_limit())
+        let limit = if with_no_queries() { Limit::new(1048576) } else { tcx.type_length_limit() };
+        Self::new_with_limit(tcx, ns, limit)
     }
 
     pub fn new_with_limit(tcx: TyCtxt<'tcx>, ns: Namespace, type_length_limit: Limit) -> Self {

@@ -2999,7 +3004,7 @@ fn for_each_def(tcx: TyCtxt<'_>, mut collect_fn: impl for<'b> FnMut(&'b Ident, N
 ///
 /// The implementation uses similar import discovery logic to that of 'use' suggestions.
 ///
-/// See also [`DelayDm`](rustc_error_messages::DelayDm) and [`with_no_trimmed_paths`].
+/// See also [`DelayDm`](rustc_error_messages::DelayDm) and [`with_no_trimmed_paths!`].
 fn trimmed_def_paths(tcx: TyCtxt<'_>, (): ()) -> FxHashMap<DefId, Symbol> {
     let mut map: FxHashMap<DefId, Symbol> = FxHashMap::default();
 
@@ -214,6 +214,9 @@ mir_build_mutation_of_layout_constrained_field_requires_unsafe_unsafe_op_in_unsa
 
 mir_build_non_const_path = runtime values cannot be referenced in patterns
 
+mir_build_non_exhaustive_match_all_arms_guarded =
+    match arms with guards don't count towards exhaustivity
+
 mir_build_non_exhaustive_omitted_pattern = some variants are not matched explicitly
     .help = ensure that all variants are matched explicitly by adding the suggested match arms
     .note = the matched value is of type `{$scrut_ty}` and the `non_exhaustive_omitted_patterns` attribute was found

@@ -432,6 +432,10 @@ impl<'a> IntoDiagnostic<'a> for NonExhaustivePatternsTypeNotEmpty<'_, '_, '_> {
     }
 }
 
+#[derive(Subdiagnostic)]
+#[note(mir_build_non_exhaustive_match_all_arms_guarded)]
+pub struct NonExhaustiveMatchAllArmsGuarded;
+
 #[derive(Diagnostic)]
 #[diag(mir_build_static_in_pattern, code = "E0158")]
 pub struct StaticInPattern {

@@ -830,6 +830,11 @@ fn non_exhaustive_match<'p, 'tcx>(
             _ => " or multiple match arms",
         },
     );
+
+    let all_arms_have_guards = arms.iter().all(|arm_id| thir[*arm_id].guard.is_some());
+    if !is_empty_match && all_arms_have_guards {
+        err.subdiagnostic(NonExhaustiveMatchAllArmsGuarded);
+    }
     if let Some((span, sugg)) = suggestion {
         err.span_suggestion_verbose(span, msg, sugg, Applicability::HasPlaceholders);
     } else {
@@ -16,7 +16,7 @@ use rustc_middle::query::on_disk_cache::AbsoluteBytePos;
 use rustc_middle::query::on_disk_cache::{CacheDecoder, CacheEncoder, EncodedDepNodeIndex};
 use rustc_middle::query::Key;
 use rustc_middle::ty::tls::{self, ImplicitCtxt};
-use rustc_middle::ty::{self, TyCtxt};
+use rustc_middle::ty::{self, print::with_no_queries, TyCtxt};
 use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext};
 use rustc_query_system::ich::StableHashingContext;
 use rustc_query_system::query::{

@@ -312,7 +312,7 @@ pub(crate) fn create_query_frame<
     );
     let description =
         if tcx.sess.verbose() { format!("{description} [{name:?}]") } else { description };
-    let span = if kind == dep_graph::DepKind::def_span {
+    let span = if kind == dep_graph::DepKind::def_span || with_no_queries() {
         // The `def_span` query is used to calculate `default_span`,
        // so exit to avoid infinite recursion.
         None

@@ -320,7 +320,7 @@ pub(crate) fn create_query_frame<
         Some(key.default_span(tcx))
     };
     let def_id = key.key_as_def_id();
-    let def_kind = if kind == dep_graph::DepKind::opt_def_kind {
+    let def_kind = if kind == dep_graph::DepKind::opt_def_kind || with_no_queries() {
        // Try to avoid infinite recursion.
         None
     } else {
@@ -5,7 +5,7 @@ pub mod suggestions;
 use super::{
     FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes, Obligation, ObligationCause,
     ObligationCauseCode, ObligationCtxt, OutputTypeParameterMismatch, Overflow,
-    PredicateObligation, SelectionContext, SelectionError, TraitNotObjectSafe,
+    PredicateObligation, SelectionError, TraitNotObjectSafe,
 };
 use crate::infer::error_reporting::{TyCategory, TypeAnnotationNeeded as ErrorCode};
 use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};

@@ -2272,55 +2272,40 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
                 )
             };
 
-            let obligation = obligation.with(self.tcx, trait_ref);
-            let mut selcx = SelectionContext::new(&self);
-            match selcx.select_from_obligation(&obligation) {
-                Ok(None) => {
-                    let ambiguities =
-                        ambiguity::recompute_applicable_impls(self.infcx, &obligation);
-                    let has_non_region_infer = trait_ref
-                        .skip_binder()
-                        .substs
-                        .types()
-                        .any(|t| !t.is_ty_or_numeric_infer());
-                    // It doesn't make sense to talk about applicable impls if there are more
-                    // than a handful of them.
-                    if ambiguities.len() > 1 && ambiguities.len() < 10 && has_non_region_infer {
-                        if self.tainted_by_errors().is_some() && subst.is_none() {
-                            // If `subst.is_none()`, then this is probably two param-env
-                            // candidates or impl candidates that are equal modulo lifetimes.
-                            // Therefore, if we've already emitted an error, just skip this
-                            // one, since it's not particularly actionable.
-                            err.cancel();
-                            return;
-                        }
-                        self.annotate_source_of_ambiguity(&mut err, &ambiguities, predicate);
-                    } else {
-                        if self.tainted_by_errors().is_some() {
-                            err.cancel();
-                            return;
-                        }
-                        err.note(format!("cannot satisfy `{}`", predicate));
-                        let impl_candidates = self.find_similar_impl_candidates(
-                            predicate.to_opt_poly_trait_pred().unwrap(),
-                        );
-                        if impl_candidates.len() < 10 {
-                            self.report_similar_impl_candidates(
-                                impl_candidates.as_slice(),
-                                trait_ref,
-                                obligation.cause.body_id,
-                                &mut err,
-                                false,
-                            );
-                        }
-                    }
+            let ambiguities = ambiguity::recompute_applicable_impls(
+                self.infcx,
+                &obligation.with(self.tcx, trait_ref),
+            );
+            let has_non_region_infer =
+                trait_ref.skip_binder().substs.types().any(|t| !t.is_ty_or_numeric_infer());
+            // It doesn't make sense to talk about applicable impls if there are more
+            // than a handful of them.
+            if ambiguities.len() > 1 && ambiguities.len() < 10 && has_non_region_infer {
+                if self.tainted_by_errors().is_some() && subst.is_none() {
+                    // If `subst.is_none()`, then this is probably two param-env
+                    // candidates or impl candidates that are equal modulo lifetimes.
+                    // Therefore, if we've already emitted an error, just skip this
+                    // one, since it's not particularly actionable.
+                    err.cancel();
+                    return;
                 }
-                _ => {
-                    if self.tainted_by_errors().is_some() {
-                        err.cancel();
-                        return;
-                    }
-                    err.note(format!("cannot satisfy `{}`", predicate));
+                self.annotate_source_of_ambiguity(&mut err, &ambiguities, predicate);
+            } else {
+                if self.tainted_by_errors().is_some() {
+                    err.cancel();
+                    return;
+                }
+                err.note(format!("cannot satisfy `{}`", predicate));
+                let impl_candidates = self
+                    .find_similar_impl_candidates(predicate.to_opt_poly_trait_pred().unwrap());
+                if impl_candidates.len() < 10 {
+                    self.report_similar_impl_candidates(
+                        impl_candidates.as_slice(),
+                        trait_ref,
+                        obligation.cause.body_id,
+                        &mut err,
+                        false,
+                    );
                 }
             }
 
@@ -1720,7 +1720,6 @@ fn assemble_candidates_from_impls<'cx, 'tcx>(
     };
 
     let eligible = match &impl_source {
-        super::ImplSource::TraitAlias(_) => true,
         super::ImplSource::UserDefined(impl_data) => {
             // We have to be careful when projecting out of an
             // impl because of specialization. If we are not in

@@ -2012,8 +2011,7 @@ fn confirm_select_candidate<'cx, 'tcx>(
         }
         super::ImplSource::Object(_)
         | super::ImplSource::Param(..)
-        | super::ImplSource::TraitUpcasting(_)
-        | super::ImplSource::TraitAlias(..) => {
+        | super::ImplSource::TraitUpcasting(_) => {
             // we don't create Select candidates with this kind of resolution
             span_bug!(
                 obligation.cause.span,
@@ -27,10 +27,9 @@ use crate::traits::vtable::{
 };
 use crate::traits::{
     BuiltinDerivedObligation, ImplDerivedObligation, ImplDerivedObligationCause, ImplSource,
-    ImplSourceObjectData, ImplSourceTraitAliasData, ImplSourceTraitUpcastingData,
-    ImplSourceUserDefinedData, Normalized, Obligation, ObligationCause,
-    OutputTypeParameterMismatch, PredicateObligation, Selection, SelectionError,
-    TraitNotObjectSafe, TraitObligation, Unimplemented,
+    ImplSourceObjectData, ImplSourceTraitUpcastingData, ImplSourceUserDefinedData, Normalized,
+    Obligation, ObligationCause, OutputTypeParameterMismatch, PredicateObligation, Selection,
+    SelectionError, TraitNotObjectSafe, TraitObligation, Unimplemented,
 };
 
 use super::BuiltinImplConditions;

@@ -105,7 +104,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 
             TraitAliasCandidate => {
                 let data = self.confirm_trait_alias_candidate(obligation);
-                ImplSource::TraitAlias(data)
+                ImplSource::Builtin(data)
             }
 
             BuiltinObjectCandidate => {

@@ -652,11 +651,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             (unnormalized_upcast_trait_ref, ty::Binder::dummy(object_trait_ref)),
         );
 
-        Ok(ImplSourceObjectData {
-            upcast_trait_def_id: upcast_trait_ref.def_id(),
-            vtable_base,
-            nested,
-        })
+        Ok(ImplSourceObjectData { vtable_base, nested })
     }
 
     fn confirm_fn_pointer_candidate(

@@ -721,10 +716,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
     fn confirm_trait_alias_candidate(
         &mut self,
         obligation: &TraitObligation<'tcx>,
-    ) -> ImplSourceTraitAliasData<'tcx, PredicateObligation<'tcx>> {
+    ) -> Vec<PredicateObligation<'tcx>> {
         debug!(?obligation, "confirm_trait_alias_candidate");
 
-        let alias_def_id = obligation.predicate.def_id();
         let predicate = self.infcx.instantiate_binder_with_placeholders(obligation.predicate);
         let trait_ref = predicate.trait_ref;
         let trait_def_id = trait_ref.def_id;

@@ -741,7 +735,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 
         debug!(?trait_def_id, ?trait_obligations, "trait alias obligations");
 
-        ImplSourceTraitAliasData { alias_def_id, substs, nested: trait_obligations }
+        trait_obligations
     }
 
     fn confirm_generator_candidate(
@@ -248,7 +248,7 @@ pub fn get_vtable_index_of_object_method<'tcx, N>(
 ) -> Option<usize> {
     // Count number of methods preceding the one we are selecting and
     // add them to the total offset.
-    tcx.own_existential_vtable_entries(object.upcast_trait_def_id)
+    tcx.own_existential_vtable_entries(tcx.parent(method_def_id))
         .iter()
         .copied()
         .position(|def_id| def_id == method_def_id)

@@ -293,9 +293,7 @@ fn resolve_associated_item<'tcx>(
                 None
             }
         }
-        traits::ImplSource::Param(..)
-        | traits::ImplSource::TraitAlias(..)
-        | traits::ImplSource::TraitUpcasting(_) => None,
+        traits::ImplSource::Param(..) | traits::ImplSource::TraitUpcasting(_) => None,
     })
 }
 
@@ -8,7 +8,7 @@
 
 #![deny(unsafe_code)]
 
-use crate::{Delimiter, Level, LineColumn, Spacing};
+use crate::{Delimiter, Level, Spacing};
 use std::fmt;
 use std::hash::Hash;
 use std::marker;

@@ -95,10 +95,10 @@ macro_rules! with_api {
                 fn parent($self: $S::Span) -> Option<$S::Span>;
                 fn source($self: $S::Span) -> $S::Span;
                 fn byte_range($self: $S::Span) -> Range<usize>;
-                fn start($self: $S::Span) -> LineColumn;
-                fn end($self: $S::Span) -> LineColumn;
-                fn before($self: $S::Span) -> $S::Span;
-                fn after($self: $S::Span) -> $S::Span;
+                fn start($self: $S::Span) -> $S::Span;
+                fn end($self: $S::Span) -> $S::Span;
+                fn line($self: $S::Span) -> usize;
+                fn column($self: $S::Span) -> usize;
                 fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
                 fn subspan($self: $S::Span, start: Bound<usize>, end: Bound<usize>) -> Option<$S::Span>;
                 fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;

@@ -299,7 +299,6 @@ mark_noop! {
     Delimiter,
     LitKind,
     Level,
-    LineColumn,
     Spacing,
 }
 

@@ -319,7 +318,6 @@ rpc_encode_decode!(
         Help,
     }
 );
-rpc_encode_decode!(struct LineColumn { line, column });
 rpc_encode_decode!(
     enum Spacing {
         Alone,
@@ -43,7 +43,6 @@ mod diagnostic;
 #[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
 pub use diagnostic::{Diagnostic, Level, MultiSpan};
 
-use std::cmp::Ordering;
 use std::ops::{Range, RangeBounds};
 use std::path::PathBuf;
 use std::str::FromStr;

@@ -494,28 +493,32 @@ impl Span {
         self.0.byte_range()
     }
 
-    /// Gets the starting line/column in the source file for this span.
-    #[unstable(feature = "proc_macro_span", issue = "54725")]
-    pub fn start(&self) -> LineColumn {
-        self.0.start().add_1_to_column()
-    }
-
-    /// Gets the ending line/column in the source file for this span.
-    #[unstable(feature = "proc_macro_span", issue = "54725")]
-    pub fn end(&self) -> LineColumn {
-        self.0.end().add_1_to_column()
-    }
-
     /// Creates an empty span pointing to directly before this span.
-    #[unstable(feature = "proc_macro_span_shrink", issue = "87552")]
-    pub fn before(&self) -> Span {
-        Span(self.0.before())
+    #[unstable(feature = "proc_macro_span", issue = "54725")]
+    pub fn start(&self) -> Span {
+        Span(self.0.start())
     }
 
     /// Creates an empty span pointing to directly after this span.
-    #[unstable(feature = "proc_macro_span_shrink", issue = "87552")]
-    pub fn after(&self) -> Span {
-        Span(self.0.after())
+    #[unstable(feature = "proc_macro_span", issue = "54725")]
+    pub fn end(&self) -> Span {
+        Span(self.0.end())
+    }
+
+    /// The one-indexed line of the source file where the span starts.
+    ///
+    /// To obtain the line of the span's end, use `span.end().line()`.
+    #[unstable(feature = "proc_macro_span", issue = "54725")]
+    pub fn line(&self) -> usize {
+        self.0.line()
+    }
+
+    /// The one-indexed column of the source file where the span starts.
+    ///
+    /// To obtain the column of the span's end, use `span.end().column()`.
+    #[unstable(feature = "proc_macro_span", issue = "54725")]
+    pub fn column(&self) -> usize {
+        self.0.column()
     }
 
     /// Creates a new span encompassing `self` and `other`.

@@ -586,44 +589,6 @@ impl fmt::Debug for Span {
     }
 }
 
-/// A line-column pair representing the start or end of a `Span`.
-#[unstable(feature = "proc_macro_span", issue = "54725")]
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub struct LineColumn {
-    /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
-    #[unstable(feature = "proc_macro_span", issue = "54725")]
-    pub line: usize,
-    /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
-    /// file on which the span starts or ends (inclusive).
-    #[unstable(feature = "proc_macro_span", issue = "54725")]
-    pub column: usize,
-}
-
-impl LineColumn {
-    fn add_1_to_column(self) -> Self {
-        LineColumn { line: self.line, column: self.column + 1 }
-    }
-}
-
-#[unstable(feature = "proc_macro_span", issue = "54725")]
-impl !Send for LineColumn {}
-#[unstable(feature = "proc_macro_span", issue = "54725")]
-impl !Sync for LineColumn {}
-
-#[unstable(feature = "proc_macro_span", issue = "54725")]
-impl Ord for LineColumn {
-    fn cmp(&self, other: &Self) -> Ordering {
-        self.line.cmp(&other.line).then(self.column.cmp(&other.column))
-    }
-}
-
-#[unstable(feature = "proc_macro_span", issue = "54725")]
-impl PartialOrd for LineColumn {
-    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
 /// The source file of a given `Span`.
 #[unstable(feature = "proc_macro_span", issue = "54725")]
 #[derive(Clone)]
@@ -527,9 +527,9 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
 dependencies = [
  "unicode-ident",
 ]
@@ -1525,7 +1525,6 @@ fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
     map.insert("toggle-all-docs".into(), 1);
     map.insert("all-types".into(), 1);
     map.insert("default-settings".into(), 1);
-    map.insert("rustdoc-vars".into(), 1);
     map.insert("sidebar-vars".into(), 1);
     map.insert("copy-path".into(), 1);
     map.insert("TOC".into(), 1);
@@ -3,13 +3,13 @@
 
 // Local js definitions:
 /* global addClass, getCurrentValue, onEachLazy, removeClass, browserSupportsHistoryApi */
-/* global updateLocalStorage */
+/* global updateLocalStorage, getVar */
 
 "use strict";
 
 (function() {
 
-const rootPath = document.getElementById("rustdoc-vars").attributes["data-root-path"].value;
+const rootPath = getVar("root-path");
 
 const NAME_OFFSET = 0;
 const DIRS_OFFSET = 1;
@@ -108,7 +108,7 @@ function getCurrentValue(name) {
 // Get a value from the rustdoc-vars div, which is used to convey data from
 // Rust to the JS. If there is no such element, return null.
 const getVar = (function getVar(name) {
-    const el = document.getElementById("rustdoc-vars");
+    const el = document.querySelector("head > meta[name='rustdoc-vars']");
     return el ? el.attributes["data-" + name].value : null;
 });
 
@@ -24,7 +24,7 @@
 {% endfor %}
 ></script> {# #}
 {% endif %}
-<div id="rustdoc-vars" {#+ #}
+<meta name="rustdoc-vars" {#+ #}
     data-root-path="{{page.root_path|safe}}" {#+ #}
     data-static-root-path="{{static_root_path|safe}}" {#+ #}
     data-current-crate="{{layout.krate}}" {#+ #}
@@ -39,7 +39,6 @@
     data-theme-dark-css="{{files.theme_dark_css}}" {#+ #}
     data-theme-ayu-css="{{files.theme_ayu_css}}" {#+ #}
 > {# #}
-</div> {# #}
 <script src="{{static_root_path|safe}}{{files.storage_js}}"></script> {# #}
 {% if page.css_class.contains("crate") %}
 <script defer src="{{page.root_path|safe}}crates{{page.resource_suffix}}.js"></script> {# #}
@@ -528,9 +528,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
 dependencies = [
  "unicode-ident",
 ]
@@ -178,9 +178,9 @@ checksum = "ece97ea872ece730aed82664c424eb4c8291e1ff2480247ccf7409044bc6479f"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
 dependencies = [
  "unicode-ident",
 ]
@@ -1 +1 @@
-08fd6f719ee764cf62659ddf481e22dbfe4b8894
+75726cae37317c7262b69d3e9fd11a3496a88d04
@@ -83,9 +83,9 @@ version = "0.1.0"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.49"
+version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
 dependencies = [
  "unicode-ident",
 ]
@@ -193,9 +193,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.49"
+version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
 dependencies = [
  "unicode-ident",
 ]
@@ -1304,9 +1304,9 @@ version = "0.0.0"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.60"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406"
 dependencies = [
  "unicode-ident",
 ]
@@ -8,10 +8,7 @@
 //!
 //! FIXME: No span and source file information is implemented yet
 
-use proc_macro::{
-    bridge::{self, server},
-    LineColumn,
-};
+use proc_macro::bridge::{self, server};
 
 mod token_stream;
 pub use token_stream::TokenStream;
@@ -304,14 +301,6 @@ impl server::Span for RustAnalyzer {
         // FIXME handle span
         Range { start: 0, end: 0 }
     }
-    fn start(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn end(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
     fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
         // Just return the first span again, because some macros will unwrap the result.
         Some(first)
@@ -330,13 +319,23 @@ impl server::Span for RustAnalyzer {
         tt::TokenId::unspecified()
     }
 
-    fn after(&mut self, _self_: Self::Span) -> Self::Span {
+    fn end(&mut self, _self_: Self::Span) -> Self::Span {
         tt::TokenId::unspecified()
     }
 
-    fn before(&mut self, _self_: Self::Span) -> Self::Span {
+    fn start(&mut self, _self_: Self::Span) -> Self::Span {
         tt::TokenId::unspecified()
     }
+
+    fn line(&mut self, _span: Self::Span) -> usize {
+        // FIXME handle line
+        0
+    }
+
+    fn column(&mut self, _span: Self::Span) -> usize {
+        // FIXME handle column
+        0
+    }
 }
 
 impl server::Symbol for RustAnalyzer {
@@ -1,4 +1,4 @@
 // @has test.css
 // @has foo/struct.Foo.html
-// @has - '//*[@id="rustdoc-vars"]/@data-themes' 'test'
+// @has - '//*[@name="rustdoc-vars"]/@data-themes' 'test'
 pub struct Foo;
tests/ui/associated-type-bounds/implied-in-supertrait.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
+// check-pass
+
+#![feature(associated_type_bounds)]
+
+trait Trait: Super<Assoc: Bound> {}
+
+trait Super {
+    type Assoc;
+}
+
+trait Bound {}
+
+fn foo<T>(x: T)
+where
+    T: Trait,
+{
+}
+
+fn main() {}
@@ -0,0 +1,28 @@
+// edition:2021
+// check-pass
+
+#![feature(async_fn_in_trait, return_position_impl_trait_in_trait, return_type_notation)]
+//~^ WARN the feature `return_type_notation` is incomplete
+
+use std::future::Future;
+
+struct JoinHandle<T>(fn() -> T);
+
+fn spawn<T>(_: impl Future<Output = T>) -> JoinHandle<T> {
+    todo!()
+}
+
+trait Foo {
+    async fn bar(&self) -> i32;
+}
+
+trait SendFoo: Foo<bar(): Send> + Send {}
+
+fn foobar(foo: impl SendFoo) -> JoinHandle<i32> {
+    spawn(async move {
+        let future = foo.bar();
+        future.await
+    })
+}
+
+fn main() {}
@@ -0,0 +1,11 @@
+warning: the feature `return_type_notation` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/rtn-implied-in-supertrait.rs:4:68
+   |
+LL | #![feature(async_fn_in_trait, return_position_impl_trait_in_trait, return_type_notation)]
+   |                                                                    ^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #109417 <https://github.com/rust-lang/rust/issues/109417> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+warning: 1 warning emitted
+
@@ -8,11 +8,6 @@ extern crate proc_macro;
 
 use proc_macro::{quote, Span, TokenStream, TokenTree};
 
-fn assert_same_span(a: Span, b: Span) {
-    assert_eq!(a.start(), b.start());
-    assert_eq!(a.end(), b.end());
-}
-
 // This macro generates a macro with the same macro definition as `manual_foo` in
 // `same-sequence-span.rs` but with the same span for all sequences.
 #[proc_macro]
@@ -17,15 +17,14 @@ LL | $(= $z:tt)*
 error: `$x:expr` may be followed by `$y:tt`, which is not allowed for `expr` fragments
   --> $DIR/same-sequence-span.rs:19:1
    |
+LL | | }
+   | |_________________________________^ not allowed after `expr` fragments
+LL |
 LL | proc_macro_sequence::make_foo!();
    | ^-------------------------------
    | |
    | _in this macro invocation
    | |
-LL | |
-LL | |
-LL | | fn main() {}
-   | |_________________________________^ not allowed after `expr` fragments
    |
    = note: allowed there are: `=>`, `,` or `;`
    = note: this error originates in the macro `proc_macro_sequence::make_foo` (in Nightly builds, run with -Z macro-backtrace for more info)
@@ -176,6 +176,7 @@ LL | match_guarded_arm!(0u8);
    | ^^^ pattern `_` not covered
    |
    = note: the matched value is of type `u8`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -183,7 +184,7 @@ LL + _ => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyStruct1` not covered
-  --> $DIR/empty-match.rs:133:24
+  --> $DIR/empty-match.rs:134:24
    |
 LL | match_guarded_arm!(NonEmptyStruct1);
    | ^^^^^^^^^^^^^^^ pattern `NonEmptyStruct1` not covered
@@ -194,6 +195,7 @@ note: `NonEmptyStruct1` defined here
 LL | struct NonEmptyStruct1;
    | ^^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyStruct1`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -201,7 +203,7 @@ LL + NonEmptyStruct1 => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyStruct2(_)` not covered
-  --> $DIR/empty-match.rs:137:24
+  --> $DIR/empty-match.rs:139:24
    |
 LL | match_guarded_arm!(NonEmptyStruct2(true));
    | ^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyStruct2(_)` not covered
@@ -212,6 +214,7 @@ note: `NonEmptyStruct2` defined here
 LL | struct NonEmptyStruct2(bool);
    | ^^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyStruct2`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -219,7 +222,7 @@ LL + NonEmptyStruct2(_) => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyUnion1 { .. }` not covered
-  --> $DIR/empty-match.rs:141:24
+  --> $DIR/empty-match.rs:144:24
    |
 LL | match_guarded_arm!((NonEmptyUnion1 { foo: () }));
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion1 { .. }` not covered
@@ -230,6 +233,7 @@ note: `NonEmptyUnion1` defined here
 LL | union NonEmptyUnion1 {
    | ^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyUnion1`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -237,7 +241,7 @@ LL + NonEmptyUnion1 { .. } => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyUnion2 { .. }` not covered
-  --> $DIR/empty-match.rs:145:24
+  --> $DIR/empty-match.rs:149:24
    |
 LL | match_guarded_arm!((NonEmptyUnion2 { foo: () }));
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion2 { .. }` not covered
@@ -248,6 +252,7 @@ note: `NonEmptyUnion2` defined here
 LL | union NonEmptyUnion2 {
    | ^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyUnion2`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -255,7 +260,7 @@ LL + NonEmptyUnion2 { .. } => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyEnum1::Foo(_)` not covered
-  --> $DIR/empty-match.rs:149:24
+  --> $DIR/empty-match.rs:154:24
    |
 LL | match_guarded_arm!(NonEmptyEnum1::Foo(true));
    | ^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyEnum1::Foo(_)` not covered
@@ -268,6 +273,7 @@ LL | enum NonEmptyEnum1 {
 LL | Foo(bool),
    | ^^^ not covered
    = note: the matched value is of type `NonEmptyEnum1`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -275,7 +281,7 @@ LL + NonEmptyEnum1::Foo(_) => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
-  --> $DIR/empty-match.rs:153:24
+  --> $DIR/empty-match.rs:159:24
    |
 LL | match_guarded_arm!(NonEmptyEnum2::Foo(true));
    | ^^^^^^^^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
@@ -291,6 +297,7 @@ LL | Foo(bool),
 LL | Bar,
    | ^^^ not covered
    = note: the matched value is of type `NonEmptyEnum2`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern, a match arm with multiple or-patterns as shown, or multiple match arms
    |
 LL ~ _ if false => {},
@@ -298,7 +305,7 @@ LL + NonEmptyEnum2::Foo(_) | NonEmptyEnum2::Bar => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
-  --> $DIR/empty-match.rs:157:24
+  --> $DIR/empty-match.rs:164:24
    |
 LL | match_guarded_arm!(NonEmptyEnum5::V1);
    | ^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
@@ -309,6 +316,7 @@ note: `NonEmptyEnum5` defined here
 LL | enum NonEmptyEnum5 {
    | ^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyEnum5`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern as shown, or multiple match arms
    |
 LL ~ _ if false => {},

@@ -175,6 +175,7 @@ LL | match_guarded_arm!(0u8);
    | ^^^ pattern `_` not covered
    |
    = note: the matched value is of type `u8`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -182,7 +183,7 @@ LL + _ => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyStruct1` not covered
-  --> $DIR/empty-match.rs:133:24
+  --> $DIR/empty-match.rs:134:24
    |
 LL | match_guarded_arm!(NonEmptyStruct1);
    | ^^^^^^^^^^^^^^^ pattern `NonEmptyStruct1` not covered
@@ -193,6 +194,7 @@ note: `NonEmptyStruct1` defined here
 LL | struct NonEmptyStruct1;
    | ^^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyStruct1`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -200,7 +202,7 @@ LL + NonEmptyStruct1 => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyStruct2(_)` not covered
-  --> $DIR/empty-match.rs:137:24
+  --> $DIR/empty-match.rs:139:24
    |
 LL | match_guarded_arm!(NonEmptyStruct2(true));
    | ^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyStruct2(_)` not covered
@@ -211,6 +213,7 @@ note: `NonEmptyStruct2` defined here
 LL | struct NonEmptyStruct2(bool);
    | ^^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyStruct2`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -218,7 +221,7 @@ LL + NonEmptyStruct2(_) => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyUnion1 { .. }` not covered
-  --> $DIR/empty-match.rs:141:24
+  --> $DIR/empty-match.rs:144:24
    |
 LL | match_guarded_arm!((NonEmptyUnion1 { foo: () }));
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion1 { .. }` not covered
@@ -229,6 +232,7 @@ note: `NonEmptyUnion1` defined here
 LL | union NonEmptyUnion1 {
    | ^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyUnion1`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -236,7 +240,7 @@ LL + NonEmptyUnion1 { .. } => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyUnion2 { .. }` not covered
-  --> $DIR/empty-match.rs:145:24
+  --> $DIR/empty-match.rs:149:24
    |
 LL | match_guarded_arm!((NonEmptyUnion2 { foo: () }));
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyUnion2 { .. }` not covered
@@ -247,6 +251,7 @@ note: `NonEmptyUnion2` defined here
 LL | union NonEmptyUnion2 {
    | ^^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyUnion2`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -254,7 +259,7 @@ LL + NonEmptyUnion2 { .. } => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyEnum1::Foo(_)` not covered
-  --> $DIR/empty-match.rs:149:24
+  --> $DIR/empty-match.rs:154:24
    |
 LL | match_guarded_arm!(NonEmptyEnum1::Foo(true));
    | ^^^^^^^^^^^^^^^^^^^^^^^^ pattern `NonEmptyEnum1::Foo(_)` not covered
@@ -267,6 +272,7 @@ LL | enum NonEmptyEnum1 {
 LL | Foo(bool),
    | ^^^ not covered
    = note: the matched value is of type `NonEmptyEnum1`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL ~ _ if false => {},
@@ -274,7 +280,7 @@ LL + NonEmptyEnum1::Foo(_) => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
-  --> $DIR/empty-match.rs:153:24
+  --> $DIR/empty-match.rs:159:24
    |
 LL | match_guarded_arm!(NonEmptyEnum2::Foo(true));
    | ^^^^^^^^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
@@ -290,6 +296,7 @@ LL | Foo(bool),
 LL | Bar,
    | ^^^ not covered
    = note: the matched value is of type `NonEmptyEnum2`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern, a match arm with multiple or-patterns as shown, or multiple match arms
    |
 LL ~ _ if false => {},
@@ -297,7 +304,7 @@ LL + NonEmptyEnum2::Foo(_) | NonEmptyEnum2::Bar => todo!()
    |
 
 error[E0004]: non-exhaustive patterns: `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
-  --> $DIR/empty-match.rs:157:24
+  --> $DIR/empty-match.rs:164:24
    |
 LL | match_guarded_arm!(NonEmptyEnum5::V1);
    | ^^^^^^^^^^^^^^^^^ patterns `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
@@ -308,6 +315,7 @@ note: `NonEmptyEnum5` defined here
 LL | enum NonEmptyEnum5 {
    | ^^^^^^^^^^^^^
    = note: the matched value is of type `NonEmptyEnum5`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern as shown, or multiple match arms
    |
 LL ~ _ if false => {},
@@ -128,34 +128,42 @@ fn main() {
 
     match_guarded_arm!(0u8); //~ ERROR `_` not covered
     //~| NOTE the matched value is of type
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE pattern `_` not covered
     //~| NOTE in this expansion of match_guarded_arm!
     match_guarded_arm!(NonEmptyStruct1); //~ ERROR `NonEmptyStruct1` not covered
     //~| NOTE pattern `NonEmptyStruct1` not covered
     //~| NOTE the matched value is of type
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE in this expansion of match_guarded_arm!
     match_guarded_arm!(NonEmptyStruct2(true)); //~ ERROR `NonEmptyStruct2(_)` not covered
     //~| NOTE the matched value is of type
     //~| NOTE pattern `NonEmptyStruct2(_)` not covered
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE in this expansion of match_guarded_arm!
     match_guarded_arm!((NonEmptyUnion1 { foo: () })); //~ ERROR `NonEmptyUnion1 { .. }` not covered
     //~| NOTE the matched value is of type
     //~| NOTE pattern `NonEmptyUnion1 { .. }` not covered
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE in this expansion of match_guarded_arm!
     match_guarded_arm!((NonEmptyUnion2 { foo: () })); //~ ERROR `NonEmptyUnion2 { .. }` not covered
     //~| NOTE the matched value is of type
     //~| NOTE pattern `NonEmptyUnion2 { .. }` not covered
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE in this expansion of match_guarded_arm!
     match_guarded_arm!(NonEmptyEnum1::Foo(true)); //~ ERROR `NonEmptyEnum1::Foo(_)` not covered
     //~| NOTE the matched value is of type
     //~| NOTE pattern `NonEmptyEnum1::Foo(_)` not covered
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE in this expansion of match_guarded_arm!
     match_guarded_arm!(NonEmptyEnum2::Foo(true)); //~ ERROR `NonEmptyEnum2::Foo(_)` and `NonEmptyEnum2::Bar` not covered
     //~| NOTE the matched value is of type
     //~| NOTE patterns `NonEmptyEnum2::Foo(_)` and
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE in this expansion of match_guarded_arm!
     match_guarded_arm!(NonEmptyEnum5::V1); //~ ERROR `NonEmptyEnum5::V1`, `NonEmptyEnum5::V2`, `NonEmptyEnum5::V3` and 2 more not covered
     //~| NOTE the matched value is of type
     //~| NOTE patterns `NonEmptyEnum5::V1`,
+    //~| NOTE match arms with guards don't count towards exhaustivity
     //~| NOTE in this expansion of match_guarded_arm!
 }
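For context on the new note these tests now expect (an editorial aside, not part of the commit): an arm with an `if` guard never counts toward exhaustiveness, because the compiler does not reason about the guard's run-time result. A minimal stand-alone illustration on stable Rust:

fn main() {
    let x = 0u8;
    // Without the `_` arm this match would be rejected with E0004, even though
    // the guard covers every value at run time: guarded arms don't count
    // towards exhaustivity.
    let label = match x {
        n if n < 10 => "small",
        _ => "other",
    };
    println!("{label}");
}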
@@ -1,19 +1,19 @@
 #![feature(box_patterns)]
 
 struct HTMLImageData {
-    image: Option<String>
+    image: Option<String>,
 }
 
 struct ElementData {
-    kind: Box<ElementKind>
+    kind: Box<ElementKind>,
 }
 
 enum ElementKind {
-    HTMLImageElement(HTMLImageData)
+    HTMLImageElement(HTMLImageData),
 }
 
 enum NodeKind {
-    Element(ElementData)
+    Element(ElementData),
 }
 
 struct NodeData {
@@ -27,8 +27,13 @@ fn main() {
 
     // n.b. span could be better
     match n.kind {
-        box NodeKind::Element(ed) => match ed.kind { //~ ERROR non-exhaustive patterns
-            box ElementKind::HTMLImageElement(ref d) if d.image.is_some() => { true }
+        box NodeKind::Element(ed) => match ed.kind {
+        //~^ ERROR non-exhaustive patterns
+        //~| NOTE the matched value is of type
+        //~| NOTE match arms with guards don't count towards exhaustivity
+        //~| NOTE pattern `box _` not covered
+        //~| NOTE `Box<ElementKind>` defined here
+            box ElementKind::HTMLImageElement(ref d) if d.image.is_some() => true,
         },
     };
 }
@@ -7,10 +7,11 @@ LL | box NodeKind::Element(ed) => match ed.kind {
 note: `Box<ElementKind>` defined here
   --> $SRC_DIR/alloc/src/boxed.rs:LL:COL
    = note: the matched value is of type `Box<ElementKind>`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
-LL ~ box ElementKind::HTMLImageElement(ref d) if d.image.is_some() => { true },
-LL + box _ => todo!()
+LL ~ box ElementKind::HTMLImageElement(ref d) if d.image.is_some() => true,
+LL ~ box _ => todo!(),
    |
 
 error: aborting due to previous error
@@ -1,4 +1,5 @@
 fn main() {
     match 0 { 1 => () } //~ ERROR non-exhaustive patterns
     match 0 { 0 if false => () } //~ ERROR non-exhaustive patterns
+    //~| NOTE match arms with guards don't count towards exhaustivity
 }
@@ -17,6 +17,7 @@ LL | match 0 { 0 if false => () }
    | ^ pattern `_` not covered
    |
    = note: the matched value is of type `i32`
+   = note: match arms with guards don't count towards exhaustivity
 help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown
    |
 LL | match 0 { 0 if false => (), _ => todo!() }
@@ -1,18 +1,9 @@
-use proc_macro::{LineColumn, Punct, Spacing};
+use proc_macro::{Punct, Spacing};
 
 pub fn test() {
-    test_line_column_ord();
     test_punct_eq();
 }
 
-fn test_line_column_ord() {
-    let line0_column0 = LineColumn { line: 0, column: 0 };
-    let line0_column1 = LineColumn { line: 0, column: 1 };
-    let line1_column0 = LineColumn { line: 1, column: 0 };
-    assert!(line0_column0 < line0_column1);
-    assert!(line0_column1 < line1_column0);
-}
-
 fn test_punct_eq() {
     let colon_alone = Punct::new(':', Spacing::Alone);
     assert_eq!(colon_alone, ':');
@@ -26,10 +26,9 @@ pub fn assert_span_pos(input: TokenStream) -> TokenStream {
     let line: usize = str1.parse().unwrap();
     let col: usize = str2.parse().unwrap();
 
-    let sp1s = sp1.start();
-    if (line, col) != (sp1s.line, sp1s.column) {
+    if (line, col) != (sp1.line(), sp1.column()) {
         let msg = format!("line/column mismatch: ({}, {}) != ({}, {})", line, col,
-            sp1s.line, sp1s.column);
+            sp1.line(), sp1.column());
         sp1.error(msg).emit();
     }
 
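Migration-wise (an illustrative aside, not from the diff): code that previously read fields off the removed `LineColumn` returned by `Span::start()` now calls `line()`/`column()` on the span itself, as the test above does. A hedged sketch of the before/after shape, assuming a nightly proc-macro crate with `#![feature(proc_macro_span)]`; the helper name `start_position` is made up:

extern crate proc_macro;
use proc_macro::Span;

// Old shape (removed API):
//     let start = span.start();            // LineColumn { line, column }
//     let pos = (start.line, start.column);
// New shape: query the start position directly on the span.
fn start_position(span: Span) -> (usize, usize) {
    (span.line(), span.column())
}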
@@ -81,7 +81,7 @@ fn expect_brace(tokens: &mut token_stream::IntoIter) -> token_stream::IntoIter {
 
 fn check_useful_span(token: TokenTree, expected_filename: &str) {
     let span = token.span();
-    assert!(span.start().column < span.end().column);
+    assert!(span.column() < span.end().column());
 
     let source_path = span.source_file().path();
     let filename = source_path.components().last().unwrap();
@@ -5,6 +5,7 @@ LL | needs_bar::<T>();
    | ^^^^^^^^^^^^^^
    |
    = note: cannot satisfy `T: Bar`
+   = help: the trait `Bar` is implemented for `T`
 note: required by a bound in `needs_bar`
   --> $DIR/two-projection-param-candidates-are-ambiguous.rs:23:17
    |
tests/ui/treat-err-as-bug/panic-causes-oom-112708.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
+// compile-flags: -Ztreat-err-as-bug
+// dont-check-failure-status
+// error-pattern: aborting due to `-Z treat-err-as-bug=1`
+// dont-check-compiler-stderr
+// rustc-env:RUST_BACKTRACE=0
+
+fn main() {
+    #[deny(while_true)]
+    while true {}
+}
tests/ui/treat-err-as-bug/panic-causes-oom-112708.stderr (new file, 32 lines)
@@ -0,0 +1,32 @@
+error: denote infinite loops with `loop { ... }`
+  --> $DIR/panic-causes-oom-112708.rs:13:5
+   |
+LL |     while true {}
+   |     ^^^^^^^^^^ help: use `loop`
+   |
+note: the lint level is defined here
+  --> $DIR/panic-causes-oom-112708.rs:12:12
+   |
+LL |     #[deny(while_true)]
+   |            ^^^^^^^^^^
+
+
+query stack during panic:
+#0 [early_lint_checks] perform lints prior to macro expansion
+#1 [hir_crate] getting the crate HIR
+end of query stack
+
+error: the compiler unexpectedly panicked. this is a bug.
+
+query stack during panic:
+#0 [early_lint_checks] perform lints prior to macro expansion
+#1 [hir_crate] getting the crate HIR
+end of query stack
+
+error: the compiler unexpectedly panicked. this is a bug.
+
+query stack during panic:
+#0 [early_lint_checks] perform lints prior to macro expansion
+#1 [hir_crate] getting the crate HIR
+end of query stack
+
+thread caused non-unwinding panic. aborting.
@@ -581,6 +581,7 @@ types = [
     "@lcnr",
     "@oli-obk",
     "@spastorino",
+    "@BoxyUwU",
 ]
 borrowck = [
     "@davidtwco",