
Auto merge of #138114 - compiler-errors:rollup-7xr4b69, r=compiler-errors

Rollup of 25 pull requests

Successful merges:

 - #135733 (Implement `&pin const self` and `&pin mut self` sugars)
 - #135895 (Document workings of successors more clearly)
 - #136922 (Pattern types: Avoid having to handle an Option for range ends in the type system or the HIR)
 - #137303 (Remove `MaybeForgetReturn` suggestion)
 - #137327 (Undeprecate env::home_dir)
 - #137358 (Match Ergonomics 2024: add context and examples to the unstable book)
 - #137534 ([rustdoc] hide item that is not marked as doc(inline) and whose src is doc(hidden))
 - #137565 (Try to point of macro expansion from resolver and method errors if it involves macro var)
 - #137637 (Check dyn flavor before registering upcast goal on wide pointer cast in MIR typeck)
 - #137643 (Add DWARF test case for non-C-like `repr128` enums)
 - #137744 (Re-add `Clone`-derive on `Thir`)
 - #137758 (fix usage of ty decl macro fragments in attributes)
 - #137764 (Ensure that negative auto impls are always applicable)
 - #137772 (Fix char count in `Display` for `ByteStr`)
 - #137798 (ci: use ubuntu 24 on arm large runner)
 - #137802 (miri native-call support: all previously exposed provenance is accessible to the callee)
 - #137805 (adjust Layout debug printing to match the internal field name)
 - #137808 (Do not require that unsafe fields lack drop glue)
 - #137820 (Clarify why InhabitedPredicate::instantiate_opt exists)
 - #137825 (Provide more context on resolve error caused from incorrect RTN)
 - #137834 (rustc_fluent_macro: use CARGO_CRATE_NAME instead of CARGO_PKG_NAME)
 - #137868 (Add minimal platform support documentation for powerpc-unknown-linux-gnuspe)
 - #137910 (Improve error message for `AsyncFn` trait failure for RPIT)
 - #137920 (interpret/provenance_map: consistently use range_is_empty)
 - #138038 (Update `compiler-builtins` to 0.1.151)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2025-03-06 23:39:38 +00:00
commit 98a48781fe
169 changed files with 2261 additions and 1107 deletions

View file

@@ -1812,7 +1812,7 @@ where
         f.debug_struct("Layout")
             .field("size", size)
             .field("align", align)
-            .field("abi", backend_repr)
+            .field("backend_repr", backend_repr)
             .field("fields", fields)
             .field("largest_niche", largest_niche)
             .field("uninhabited", uninhabited)

View file

@@ -2641,6 +2641,8 @@ pub enum SelfKind {
     Value(Mutability),
     /// `&'lt self`, `&'lt mut self`
     Region(Option<Lifetime>, Mutability),
+    /// `&'lt pin const self`, `&'lt pin mut self`
+    Pinned(Option<Lifetime>, Mutability),
     /// `self: TYPE`, `mut self: TYPE`
     Explicit(P<Ty>, Mutability),
 }
@@ -2650,6 +2652,8 @@ impl SelfKind {
         match self {
             SelfKind::Region(None, mutbl) => mutbl.ref_prefix_str().to_string(),
             SelfKind::Region(Some(lt), mutbl) => format!("&{lt} {}", mutbl.prefix_str()),
+            SelfKind::Pinned(None, mutbl) => format!("&pin {}", mutbl.ptr_str()),
+            SelfKind::Pinned(Some(lt), mutbl) => format!("&{lt} pin {}", mutbl.ptr_str()),
             SelfKind::Value(_) | SelfKind::Explicit(_, _) => {
                 unreachable!("if we had an explicit self, we wouldn't be here")
             }
@@ -2666,11 +2670,13 @@ impl Param {
         if ident.name == kw::SelfLower {
             return match self.ty.kind {
                 TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
-                TyKind::Ref(lt, MutTy { ref ty, mutbl })
-                | TyKind::PinnedRef(lt, MutTy { ref ty, mutbl })
+                TyKind::Ref(lt, MutTy { ref ty, mutbl }) if ty.kind.is_implicit_self() => {
+                    Some(respan(self.pat.span, SelfKind::Region(lt, mutbl)))
+                }
+                TyKind::PinnedRef(lt, MutTy { ref ty, mutbl })
                     if ty.kind.is_implicit_self() =>
                 {
-                    Some(respan(self.pat.span, SelfKind::Region(lt, mutbl)))
+                    Some(respan(self.pat.span, SelfKind::Pinned(lt, mutbl)))
                 }
                 _ => Some(respan(
                     self.pat.span.to(self.ty.span),
@@ -2712,6 +2718,15 @@ impl Param {
                     tokens: None,
                 }),
            ),
+            SelfKind::Pinned(lt, mutbl) => (
+                mutbl,
+                P(Ty {
+                    id: DUMMY_NODE_ID,
+                    kind: TyKind::PinnedRef(lt, MutTy { ty: infer_ty, mutbl }),
+                    span,
+                    tokens: None,
+                }),
+            ),
         };
         Param {
             attrs,
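For context, a minimal sketch of the surface syntax the new `SelfKind::Pinned` variant models. This is nightly-only; the `pin_ergonomics` feature gate and the exact call-site plumbing below are assumptions on my part, not taken from this commit:

#![feature(pin_ergonomics)]
#![allow(incomplete_features)]

struct Counter {
    n: u32,
}

impl Counter {
    // `&pin mut self` is sugar for `self: Pin<&mut Self>`.
    fn bump(&pin mut self) {
        // `Counter` is `Unpin`, so the pinned reference still derefs mutably.
        self.n += 1;
    }

    // `&pin const self` is sugar for `self: Pin<&Self>`.
    fn value(&pin const self) -> u32 {
        self.n
    }
}

fn main() {
    let mut c = std::pin::pin!(Counter { n: 0 });
    c.as_mut().bump();
    assert_eq!(c.as_ref().value(), 1);
}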

View file

@@ -136,6 +136,7 @@ struct LoweringContext<'a, 'hir> {
     allow_try_trait: Arc<[Symbol]>,
     allow_gen_future: Arc<[Symbol]>,
+    allow_pattern_type: Arc<[Symbol]>,
     allow_async_iterator: Arc<[Symbol]>,
     allow_for_await: Arc<[Symbol]>,
     allow_async_fn_traits: Arc<[Symbol]>,
@@ -176,6 +177,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             impl_trait_defs: Vec::new(),
             impl_trait_bounds: Vec::new(),
             allow_try_trait: [sym::try_trait_v2, sym::yeet_desugar_details].into(),
+            allow_pattern_type: [sym::pattern_types, sym::pattern_type_range_trait].into(),
             allow_gen_future: if tcx.features().async_fn_track_caller() {
                 [sym::gen_future, sym::closure_track_caller].into()
             } else {
@@ -926,7 +928,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                 if let Some(first_char) = constraint.ident.as_str().chars().next()
                     && first_char.is_ascii_lowercase()
                 {
-                    tracing::info!(?data, ?data.inputs);
                     let err = match (&data.inputs[..], &data.output) {
                         ([_, ..], FnRetTy::Default(_)) => {
                             errors::BadReturnTypeNotation::Inputs { span: data.inputs_span }
@@ -1365,7 +1366,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                 }
             }
             TyKind::Pat(ty, pat) => {
-                hir::TyKind::Pat(self.lower_ty(ty, itctx), self.lower_ty_pat(pat))
+                hir::TyKind::Pat(self.lower_ty(ty, itctx), self.lower_ty_pat(pat, ty.span))
             }
             TyKind::MacCall(_) => {
                 span_bug!(t.span, "`TyKind::MacCall` should have been expanded by now")

View file

@@ -3,11 +3,11 @@ use std::sync::Arc;
 use rustc_ast::ptr::P;
 use rustc_ast::*;
 use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_hir as hir;
-use rustc_hir::def::Res;
+use rustc_hir::def::{DefKind, Res};
+use rustc_hir::{self as hir, LangItem};
 use rustc_middle::span_bug;
 use rustc_span::source_map::{Spanned, respan};
-use rustc_span::{Ident, Span};
+use rustc_span::{DesugaringKind, Ident, Span, kw};
 
 use super::errors::{
     ArbitraryExpressionInPattern, ExtraDoubleDot, MisplacedDoubleDot, SubTupleBinding,
@@ -430,22 +430,124 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         self.arena.alloc(hir::PatExpr { hir_id: self.lower_node_id(expr.id), span, kind })
     }
 
-    pub(crate) fn lower_ty_pat(&mut self, pattern: &TyPat) -> &'hir hir::TyPat<'hir> {
-        self.arena.alloc(self.lower_ty_pat_mut(pattern))
+    pub(crate) fn lower_ty_pat(
+        &mut self,
+        pattern: &TyPat,
+        base_type: Span,
+    ) -> &'hir hir::TyPat<'hir> {
+        self.arena.alloc(self.lower_ty_pat_mut(pattern, base_type))
     }
 
-    fn lower_ty_pat_mut(&mut self, pattern: &TyPat) -> hir::TyPat<'hir> {
+    fn lower_ty_pat_mut(&mut self, pattern: &TyPat, base_type: Span) -> hir::TyPat<'hir> {
         // loop here to avoid recursion
         let pat_hir_id = self.lower_node_id(pattern.id);
         let node = match &pattern.kind {
-            TyPatKind::Range(e1, e2, Spanned { node: end, .. }) => hir::TyPatKind::Range(
-                e1.as_deref().map(|e| self.lower_anon_const_to_const_arg(e)),
-                e2.as_deref().map(|e| self.lower_anon_const_to_const_arg(e)),
-                self.lower_range_end(end, e2.is_some()),
+            TyPatKind::Range(e1, e2, Spanned { node: end, span }) => hir::TyPatKind::Range(
+                e1.as_deref().map(|e| self.lower_anon_const_to_const_arg(e)).unwrap_or_else(|| {
+                    self.lower_ty_pat_range_end(
+                        hir::LangItem::RangeMin,
+                        span.shrink_to_lo(),
+                        base_type,
+                    )
+                }),
+                e2.as_deref()
+                    .map(|e| match end {
+                        RangeEnd::Included(..) => self.lower_anon_const_to_const_arg(e),
+                        RangeEnd::Excluded => self.lower_excluded_range_end(e),
+                    })
+                    .unwrap_or_else(|| {
+                        self.lower_ty_pat_range_end(
+                            hir::LangItem::RangeMax,
+                            span.shrink_to_hi(),
+                            base_type,
+                        )
+                    }),
             ),
             TyPatKind::Err(guar) => hir::TyPatKind::Err(*guar),
         };
 
         hir::TyPat { hir_id: pat_hir_id, kind: node, span: self.lower_span(pattern.span) }
     }
+
+    /// Lowers the range end of an exclusive range (`2..5`) to an inclusive range 2..=(5 - 1).
+    /// This way the type system doesn't have to handle the distinction between inclusive/exclusive ranges.
+    fn lower_excluded_range_end(&mut self, e: &AnonConst) -> &'hir hir::ConstArg<'hir> {
+        let span = self.lower_span(e.value.span);
+        let unstable_span = self.mark_span_with_reason(
+            DesugaringKind::PatTyRange,
+            span,
+            Some(Arc::clone(&self.allow_pattern_type)),
+        );
+        let anon_const = self.with_new_scopes(span, |this| {
+            let def_id = this.local_def_id(e.id);
+            let hir_id = this.lower_node_id(e.id);
+            let body = this.lower_body(|this| {
+                // Need to use a custom function as we can't just subtract `1` from a `char`.
+                let kind = hir::ExprKind::Path(this.make_lang_item_qpath(
+                    hir::LangItem::RangeSub,
+                    unstable_span,
+                    None,
+                ));
+                let fn_def = this.arena.alloc(hir::Expr { hir_id: this.next_id(), kind, span });
+                let args = this.arena.alloc([this.lower_expr_mut(&e.value)]);
+                (
+                    &[],
+                    hir::Expr {
+                        hir_id: this.next_id(),
+                        kind: hir::ExprKind::Call(fn_def, args),
+                        span,
+                    },
+                )
+            });
+            hir::AnonConst { def_id, hir_id, body, span }
+        });
+        self.arena.alloc(hir::ConstArg {
+            hir_id: self.next_id(),
+            kind: hir::ConstArgKind::Anon(self.arena.alloc(anon_const)),
+        })
+    }
+
+    /// When a range has no end specified (`1..` or `1..=`) or no start specified (`..5` or `..=5`),
+    /// we instead use a constant of the MAX/MIN of the type.
+    /// This way the type system does not have to handle the lack of a start/end.
+    fn lower_ty_pat_range_end(
+        &mut self,
+        lang_item: LangItem,
+        span: Span,
+        base_type: Span,
+    ) -> &'hir hir::ConstArg<'hir> {
+        let parent_def_id = self.current_hir_id_owner.def_id;
+        let node_id = self.next_node_id();
+
+        // Add a definition for the in-band const def.
+        // We're generating a range end that didn't exist in the AST,
+        // so the def collector didn't create the def ahead of time. That's why we have to do
+        // it here.
+        let def_id = self.create_def(parent_def_id, node_id, kw::Empty, DefKind::AnonConst, span);
+        let hir_id = self.lower_node_id(node_id);
+
+        let unstable_span = self.mark_span_with_reason(
+            DesugaringKind::PatTyRange,
+            self.lower_span(span),
+            Some(Arc::clone(&self.allow_pattern_type)),
+        );
+        let span = self.lower_span(base_type);
+
+        let path_expr = hir::Expr {
+            hir_id: self.next_id(),
+            kind: hir::ExprKind::Path(self.make_lang_item_qpath(lang_item, unstable_span, None)),
+            span,
+        };
+        let ct = self.with_new_scopes(span, |this| {
+            self.arena.alloc(hir::AnonConst {
+                def_id,
+                hir_id,
+                body: this.lower_body(|_this| (&[], path_expr)),
+                span,
+            })
+        });
+        let hir_id = self.next_id();
+        self.arena.alloc(hir::ConstArg { kind: hir::ConstArgKind::Anon(ct), hir_id })
+    }
 }
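The net effect of the new helpers is that every HIR range pattern now carries both endpoints, inclusively. A small, self-contained model of that normalization on plain integers (illustrative only; the real lowering emits calls to the `RangeMin`/`RangeMax`/`RangeSub` lang items rather than computing values directly):

// Plain-Rust model of the normalization, not rustc internals.
fn normalize_range(start: Option<u32>, end: Option<u32>, inclusive: bool) -> (u32, u32) {
    let lo = start.unwrap_or(u32::MIN);          // missing start -> RangeMin
    let hi = match (end, inclusive) {
        (Some(e), true) => e,                    // `..=e` stays as-is
        (Some(e), false) => e - 1,               // `..e` becomes `..=(e - 1)` (RangeSub)
        (None, _) => u32::MAX,                   // missing end -> RangeMax
    };
    (lo, hi)
}

fn main() {
    assert_eq!(normalize_range(Some(1), None, false), (1, u32::MAX)); // `1..`
    assert_eq!(normalize_range(None, Some(5), true), (u32::MIN, 5));  // `..=5`
    assert_eq!(normalize_range(Some(1), Some(5), false), (1, 4));     // `1..5`
}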

View file

@@ -268,7 +268,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                 }
             GenericArgs::Parenthesized(data) => match generic_args_mode {
                 GenericArgsMode::ReturnTypeNotation => {
-                    tracing::info!(?data, ?data.inputs);
                     let err = match (&data.inputs[..], &data.output) {
                         ([_, ..], FnRetTy::Default(_)) => {
                             BadReturnTypeNotation::Inputs { span: data.inputs_span }

View file

@@ -1783,6 +1783,13 @@ impl<'a> State<'a> {
                 self.print_mutability(*m, false);
                 self.word("self")
             }
+            SelfKind::Pinned(lt, m) => {
+                self.word("&");
+                self.print_opt_lifetime(lt);
+                self.word("pin ");
+                self.print_mutability(*m, true);
+                self.word("self")
+            }
             SelfKind::Explicit(typ, m) => {
                 self.print_mutability(*m, false);
                 self.word("self");

View file

@@ -473,6 +473,15 @@ impl<'a> MetaItemListParserContext<'a> {
         {
             self.inside_delimiters.next();
             return Some(MetaItemOrLitParser::Lit(lit));
+        } else if let Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) =
+            self.inside_delimiters.peek()
+        {
+            self.inside_delimiters.next();
+            return MetaItemListParserContext {
+                inside_delimiters: inner_tokens.iter().peekable(),
+                dcx: self.dcx,
+            }
+            .next();
         }
 
         // or a path.
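This is the parser-side half of #137758: a metavariable expanded inside an attribute arrives wrapped in invisible delimiters, and the meta-item parser now looks through them. A sketch of the kind of declarative macro this keeps working; the specific attribute and fragment kind below are illustrative assumptions, not taken from the PR:

// A macro that forwards a literal fragment into an attribute argument. The
// expanded `$msg` token is surrounded by invisible delimiters, which the
// attribute parser above now transparently descends into.
macro_rules! deprecate_with_note {
    ($msg:literal) => {
        #[deprecated(note = $msg)]
        pub fn old_api() {}
    };
}

deprecate_with_note!("use `new_api` instead");

#[allow(deprecated)]
fn main() {
    old_api();
}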

View file

@@ -2120,8 +2120,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
                 //
                 // Note that other checks (such as denying `dyn Send` -> `dyn
                 // Debug`) are in `rustc_hir_typeck`.
-                if let ty::Dynamic(src_tty, _src_lt, _) = *src_tail.kind()
-                    && let ty::Dynamic(dst_tty, dst_lt, _) = *dst_tail.kind()
+                if let ty::Dynamic(src_tty, _src_lt, ty::Dyn) = *src_tail.kind()
+                    && let ty::Dynamic(dst_tty, dst_lt, ty::Dyn) = *dst_tail.kind()
                     && src_tty.principal().is_some()
                     && dst_tty.principal().is_some()
                 {
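The guarded code registers an upcast goal for wide-pointer casts where both sides are `dyn` objects with a principal trait; the added `ty::Dyn` match arms keep it from firing for other dyn flavors. A small example of a cast in that category, dropping an auto trait while keeping the same principal (the types here are my own illustration):

trait Animal {
    fn name(&self) -> &'static str;
}

// Both sides of this cast are ordinary wide `dyn` pointers with a principal
// (`Animal`); removing the `Send` auto trait keeps the principal unchanged.
fn strip_send(ptr: *const (dyn Animal + Send)) -> *const dyn Animal {
    ptr as *const dyn Animal
}

fn main() {
    struct Rex;
    impl Animal for Rex {
        fn name(&self) -> &'static str {
            "Rex"
        }
    }
    let p: *const (dyn Animal + Send) = &Rex;
    let q = strip_send(p);
    // The pointee is still alive, so reading through the pointer is fine.
    assert_eq!(unsafe { (*q).name() }, "Rex");
}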

View file

@@ -16,8 +16,8 @@ index 7165c3e48af..968552ad435 100644
 
 [dependencies]
 core = { path = "../core", public = true }
--compiler_builtins = { version = "=0.1.150", features = ['rustc-dep-of-std'] }
-+compiler_builtins = { version = "=0.1.150", features = ['rustc-dep-of-std', 'no-f16-f128'] }
+-compiler_builtins = { version = "=0.1.151", features = ['rustc-dep-of-std'] }
++compiler_builtins = { version = "=0.1.151", features = ['rustc-dep-of-std', 'no-f16-f128'] }
 
 [dev-dependencies]
 rand = { version = "0.8.5", default-features = false, features = ["alloc"] }

View file

@@ -955,18 +955,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
     /// Handle the effect an FFI call might have on the state of allocations.
     /// This overapproximates the modifications which external code might make to memory:
-    /// We set all reachable allocations as initialized, mark all provenances as exposed
+    /// We set all reachable allocations as initialized, mark all reachable provenances as exposed
     /// and overwrite them with `Provenance::WILDCARD`.
-    pub fn prepare_for_native_call(
-        &mut self,
-        id: AllocId,
-        initial_prov: M::Provenance,
-    ) -> InterpResult<'tcx> {
-        // Expose provenance of the root allocation.
-        M::expose_provenance(self, initial_prov)?;
-
+    ///
+    /// The allocations in `ids` are assumed to be already exposed.
+    pub fn prepare_for_native_call(&mut self, ids: Vec<AllocId>) -> InterpResult<'tcx> {
         let mut done = FxHashSet::default();
-        let mut todo = vec![id];
+        let mut todo = ids;
         while let Some(id) = todo.pop() {
             if !done.insert(id) {
                 // We already saw this allocation before, don't process it again.

View file

@@ -1,3 +1,5 @@
+use std::alloc::Allocator;
+
 #[rustc_on_unimplemented(message = "`{Self}` doesn't implement `DynSend`. \
                                     Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`")]
 // This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()`
@@ -28,8 +30,8 @@ impls_dyn_send_neg!(
     [*const T where T: ?Sized]
     [*mut T where T: ?Sized]
     [std::ptr::NonNull<T> where T: ?Sized]
-    [std::rc::Rc<T> where T: ?Sized]
-    [std::rc::Weak<T> where T: ?Sized]
+    [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
+    [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
     [std::sync::MutexGuard<'_, T> where T: ?Sized]
     [std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
     [std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
@@ -96,8 +98,8 @@ impls_dyn_sync_neg!(
     [std::cell::RefCell<T> where T: ?Sized]
     [std::cell::UnsafeCell<T> where T: ?Sized]
     [std::ptr::NonNull<T> where T: ?Sized]
-    [std::rc::Rc<T> where T: ?Sized]
-    [std::rc::Weak<T> where T: ?Sized]
+    [std::rc::Rc<T, A> where T: ?Sized, A: Allocator]
+    [std::rc::Weak<T, A> where T: ?Sized, A: Allocator]
     [std::cell::OnceCell<T> where T]
     [std::sync::mpsc::Receiver<T> where T]
     [std::sync::mpsc::Sender<T> where T]

View file

@@ -626,7 +626,6 @@ pub enum StashKey {
     MaybeFruTypo,
     CallAssocMethod,
     AssociatedTypeSuggestion,
-    MaybeForgetReturn,
     /// Query cycle detected, stashing in favor of a better error.
     Cycle,
     UndeterminedMacroResolution,

View file

@@ -78,8 +78,8 @@ fn failed(crate_name: &Ident) -> proc_macro::TokenStream {
 /// See [rustc_fluent_macro::fluent_messages].
 pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let crate_name = std::env::var("CARGO_PKG_NAME")
-        // If `CARGO_PKG_NAME` is missing, then we're probably running in a test, so use
+    let crate_name = std::env::var("CARGO_CRATE_NAME")
+        // If `CARGO_CRATE_NAME` is missing, then we're probably running in a test, so use
         // `no_crate`.
         .unwrap_or_else(|_| "no_crate".to_string())
         .replace("rustc_", "");

View file

@@ -1600,7 +1600,7 @@ pub struct PatField<'hir> {
     pub span: Span,
 }
 
-#[derive(Copy, Clone, PartialEq, Debug, HashStable_Generic)]
+#[derive(Copy, Clone, PartialEq, Debug, HashStable_Generic, Hash, Eq, Encodable, Decodable)]
 pub enum RangeEnd {
     Included,
     Excluded,
@@ -1668,7 +1668,7 @@ pub enum PatExprKind<'hir> {
 #[derive(Debug, Clone, Copy, HashStable_Generic)]
 pub enum TyPatKind<'hir> {
     /// A range pattern (e.g., `1..=2` or `1..2`).
-    Range(Option<&'hir ConstArg<'hir>>, Option<&'hir ConstArg<'hir>>, RangeEnd),
+    Range(&'hir ConstArg<'hir>, &'hir ConstArg<'hir>),
 
     /// A placeholder for a pattern that wasn't well formed in some way.
     Err(ErrorGuaranteed),

View file

@@ -708,9 +708,9 @@ pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm<'v>) -> V::Res
 pub fn walk_ty_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v TyPat<'v>) -> V::Result {
     try_visit!(visitor.visit_id(pattern.hir_id));
     match pattern.kind {
-        TyPatKind::Range(lower_bound, upper_bound, _) => {
-            visit_opt!(visitor, visit_const_arg_unambig, lower_bound);
-            visit_opt!(visitor, visit_const_arg_unambig, upper_bound);
+        TyPatKind::Range(lower_bound, upper_bound) => {
+            try_visit!(visitor.visit_const_arg_unambig(lower_bound));
+            try_visit!(visitor.visit_const_arg_unambig(upper_bound));
         }
         TyPatKind::Err(_) => (),
     }

View file

@@ -418,6 +418,9 @@ language_item_table! {
     Range,            sym::Range,            range_struct,              Target::Struct,     GenericRequirement::None;
     RangeToInclusive, sym::RangeToInclusive, range_to_inclusive_struct, Target::Struct,     GenericRequirement::None;
     RangeTo,          sym::RangeTo,          range_to_struct,           Target::Struct,     GenericRequirement::None;
+    RangeMax,         sym::RangeMax,         range_max,                 Target::AssocConst, GenericRequirement::Exact(0);
+    RangeMin,         sym::RangeMin,         range_min,                 Target::AssocConst, GenericRequirement::Exact(0);
+    RangeSub,         sym::RangeSub,         range_sub,                 Target::Method(MethodKind::Trait { body: false }), GenericRequirement::Exact(0);
 
     // `new_range` types that are `Copy + IntoIterator`
     RangeFromCopy,    sym::RangeFromCopy,    range_from_copy_struct,    Target::Struct,     GenericRequirement::None;
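For orientation, a rough library-side sketch of what these three lang items hook into: associated constants for the missing endpoints and a method computing `end - 1` of an exclusive range. The trait name `RangePattern` appears in a comment later in this commit; its exact shape below is an assumption, not the actual libcore source:

trait RangePattern: Sized {
    const MIN: Self;          // what `RangeMin` would resolve to
    const MAX: Self;          // what `RangeMax` would resolve to
    fn sub_one(self) -> Self; // what `RangeSub` would resolve to
}

impl RangePattern for u32 {
    const MIN: Self = u32::MIN;
    const MAX: Self = u32::MAX;
    fn sub_one(self) -> Self {
        self - 1
    }
}

impl RangePattern for char {
    const MIN: Self = '\0';
    const MAX: Self = char::MAX;
    fn sub_one(self) -> Self {
        // "Minus one" on `char` has to skip the surrogate gap, which is why a
        // dedicated method exists instead of plain integer subtraction.
        char::from_u32(self as u32 - 1).unwrap_or('\u{D7FF}')
    }
}

fn main() {
    assert_eq!(<u32 as RangePattern>::MAX, u32::MAX);
    assert_eq!(<char as RangePattern>::MIN, '\0');
    assert_eq!(5u32.sub_one(), 4);
}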

View file

@@ -244,9 +244,6 @@ hir_analysis_inherent_ty_outside_relevant = cannot define inherent `impl` for a
     .help = consider moving this inherent impl into the crate defining the type if possible
     .span_help = alternatively add `#[rustc_allow_incoherent_impl]` to the relevant impl items
 
-hir_analysis_invalid_base_type = `{$ty}` is not a valid base type for range patterns
-    .note = range patterns only support integers
-
 hir_analysis_invalid_generic_receiver_ty = invalid generic `self` parameter type: `{$receiver_ty}`
     .note = type of `self` must not be a method generic parameter type
 
@@ -278,13 +275,6 @@ hir_analysis_invalid_union_field =
 hir_analysis_invalid_union_field_sugg =
     wrap the field type in `ManuallyDrop<...>`
 
-hir_analysis_invalid_unsafe_field =
-    field must implement `Copy` or be wrapped in `ManuallyDrop<...>` to be unsafe
-    .note = unsafe fields must not have drop side-effects, which is currently enforced via either `Copy` or `ManuallyDrop<...>`
-
-hir_analysis_invalid_unsafe_field_sugg =
-    wrap the field type in `ManuallyDrop<...>`
-
 hir_analysis_late_bound_const_in_apit = `impl Trait` can only mention const parameters from an fn or impl
     .label = const parameter declared here
 
@@ -620,6 +610,8 @@ hir_analysis_variances_of = {$variances}
 hir_analysis_where_clause_on_main = `main` function is not allowed to have a `where` clause
     .label = `main` cannot have a `where` clause
 
+hir_analysis_within_macro = due to this macro variable
+
 hir_analysis_wrong_number_of_generic_arguments_to_intrinsic =
     intrinsic has wrong number of {$descr} parameters: found {$found}, expected {$expected}
     .label = expected {$expected} {$descr} {$expected ->

View file

@@ -1,8 +1,15 @@
+//! This module contains methods that assist in checking that impls are general
+//! enough, i.e. that they always apply to every valid instantaiton of the ADT
+//! they're implemented for.
+//!
+//! This is necessary for `Drop` and negative impls to be well-formed.
+
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::codes::*;
 use rustc_errors::{ErrorGuaranteed, struct_span_code_err};
 use rustc_infer::infer::{RegionResolutionError, TyCtxtInferExt};
 use rustc_infer::traits::{ObligationCause, ObligationCauseCode};
+use rustc_middle::span_bug;
 use rustc_middle::ty::util::CheckRegions;
 use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt, TypingMode};
 use rustc_trait_selection::regions::InferCtxtRegionExt;
@@ -27,11 +34,12 @@ use crate::hir::def_id::{DefId, LocalDefId};
 /// 3. Any bounds on the generic parameters must be reflected in the
 ///    struct/enum definition for the nominal type itself (i.e.
 ///    cannot do `struct S<T>; impl<T:Clone> Drop for S<T> { ... }`).
-///
 pub(crate) fn check_drop_impl(
     tcx: TyCtxt<'_>,
     drop_impl_did: DefId,
 ) -> Result<(), ErrorGuaranteed> {
+    let drop_impl_did = drop_impl_did.expect_local();
+
     match tcx.impl_polarity(drop_impl_did) {
         ty::ImplPolarity::Positive => {}
         ty::ImplPolarity::Negative => {
@@ -45,55 +53,107 @@ pub(crate) fn check_drop_impl(
             }));
         }
     }
 
-    let dtor_self_type = tcx.type_of(drop_impl_did).instantiate_identity();
-    match dtor_self_type.kind() {
+    tcx.ensure_ok().orphan_check_impl(drop_impl_did)?;
+
+    let dtor_impl_trait_ref = tcx.impl_trait_ref(drop_impl_did).unwrap().instantiate_identity();
+    match dtor_impl_trait_ref.self_ty().kind() {
         ty::Adt(adt_def, adt_to_impl_args) => {
-            ensure_drop_params_and_item_params_correspond(
+            ensure_impl_params_and_item_params_correspond(
                 tcx,
-                drop_impl_did.expect_local(),
+                drop_impl_did,
                 adt_def.did(),
                 adt_to_impl_args,
             )?;
 
-            ensure_drop_predicates_are_implied_by_item_defn(
+            ensure_impl_predicates_are_implied_by_item_defn(
                 tcx,
-                drop_impl_did.expect_local(),
-                adt_def.did().expect_local(),
+                drop_impl_did,
+                adt_def.did(),
                 adt_to_impl_args,
             )
         }
         _ => {
-            // Destructors only work on nominal types. This was
-            // already checked by coherence, but compilation may
-            // not have been terminated.
-            let span = tcx.def_span(drop_impl_did);
-            let reported = tcx.dcx().span_delayed_bug(
-                span,
-                format!("should have been rejected by coherence check: {dtor_self_type}"),
-            );
-            Err(reported)
+            span_bug!(tcx.def_span(drop_impl_did), "incoherent impl of Drop");
         }
     }
 }
 
-fn ensure_drop_params_and_item_params_correspond<'tcx>(
+pub(crate) fn check_negative_auto_trait_impl<'tcx>(
     tcx: TyCtxt<'tcx>,
-    drop_impl_did: LocalDefId,
-    self_type_did: DefId,
+    impl_def_id: LocalDefId,
+    impl_trait_ref: ty::TraitRef<'tcx>,
+    polarity: ty::ImplPolarity,
+) -> Result<(), ErrorGuaranteed> {
+    let ty::ImplPolarity::Negative = polarity else {
+        return Ok(());
+    };
+
+    if !tcx.trait_is_auto(impl_trait_ref.def_id) {
+        return Ok(());
+    }
+
+    if tcx.defaultness(impl_def_id).is_default() {
+        tcx.dcx().span_delayed_bug(tcx.def_span(impl_def_id), "default impl cannot be negative");
+    }
+
+    tcx.ensure_ok().orphan_check_impl(impl_def_id)?;
+
+    match impl_trait_ref.self_ty().kind() {
+        ty::Adt(adt_def, adt_to_impl_args) => {
+            ensure_impl_params_and_item_params_correspond(
+                tcx,
+                impl_def_id,
+                adt_def.did(),
+                adt_to_impl_args,
+            )?;
+
+            ensure_impl_predicates_are_implied_by_item_defn(
+                tcx,
+                impl_def_id,
+                adt_def.did(),
+                adt_to_impl_args,
+            )
+        }
+        _ => {
+            if tcx.features().auto_traits() {
+                // NOTE: We ignore the applicability check for negative auto impls
+                // defined in libcore. In the (almost impossible) future where we
+                // stabilize auto impls, then the proper applicability check MUST
+                // be implemented here to handle non-ADT rigid types.
+                Ok(())
+            } else {
+                span_bug!(tcx.def_span(impl_def_id), "incoherent impl of negative auto trait");
            }
        }
    }
}

+fn ensure_impl_params_and_item_params_correspond<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    impl_def_id: LocalDefId,
+    adt_def_id: DefId,
     adt_to_impl_args: GenericArgsRef<'tcx>,
 ) -> Result<(), ErrorGuaranteed> {
     let Err(arg) = tcx.uses_unique_generic_params(adt_to_impl_args, CheckRegions::OnlyParam) else {
         return Ok(());
     };
 
-    let drop_impl_span = tcx.def_span(drop_impl_did);
-    let item_span = tcx.def_span(self_type_did);
-    let self_descr = tcx.def_descr(self_type_did);
+    let impl_span = tcx.def_span(impl_def_id);
+    let item_span = tcx.def_span(adt_def_id);
+    let self_descr = tcx.def_descr(adt_def_id);
+    let polarity = match tcx.impl_polarity(impl_def_id) {
+        ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => "",
+        ty::ImplPolarity::Negative => "!",
+    };
+    let trait_name = tcx
+        .item_name(tcx.trait_id_of_impl(impl_def_id.to_def_id()).expect("expected impl of trait"));
     let mut err = struct_span_code_err!(
         tcx.dcx(),
-        drop_impl_span,
+        impl_span,
         E0366,
-        "`Drop` impls cannot be specialized"
+        "`{polarity}{trait_name}` impls cannot be specialized",
     );
     match arg {
         ty::util::NotUniqueParam::DuplicateParam(arg) => {
@@ -116,17 +176,22 @@ fn ensure_drop_params_and_item_params_correspond<'tcx>(
 /// Confirms that all predicates defined on the `Drop` impl (`drop_impl_def_id`) are able to be
 /// proven from within `adt_def_id`'s environment. I.e. all the predicates on the impl are
 /// implied by the ADT being well formed.
-fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
+fn ensure_impl_predicates_are_implied_by_item_defn<'tcx>(
     tcx: TyCtxt<'tcx>,
-    drop_impl_def_id: LocalDefId,
-    adt_def_id: LocalDefId,
+    impl_def_id: LocalDefId,
+    adt_def_id: DefId,
     adt_to_impl_args: GenericArgsRef<'tcx>,
 ) -> Result<(), ErrorGuaranteed> {
     let infcx = tcx.infer_ctxt().build(TypingMode::non_body_analysis());
     let ocx = ObligationCtxt::new_with_diagnostics(&infcx);
 
-    let impl_span = tcx.def_span(drop_impl_def_id.to_def_id());
+    let impl_span = tcx.def_span(impl_def_id.to_def_id());
+    let trait_name = tcx
+        .item_name(tcx.trait_id_of_impl(impl_def_id.to_def_id()).expect("expected impl of trait"));
+    let polarity = match tcx.impl_polarity(impl_def_id) {
+        ty::ImplPolarity::Positive | ty::ImplPolarity::Reservation => "",
+        ty::ImplPolarity::Negative => "!",
+    };
 
     // Take the param-env of the adt and instantiate the args that show up in
     // the implementation's self type. This gives us the assumptions that the
     // self ty of the implementation is allowed to know just from it being a
@@ -145,17 +210,21 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
     let adt_env =
         ty::EarlyBinder::bind(tcx.param_env(adt_def_id)).instantiate(tcx, adt_to_impl_args);
 
-    let fresh_impl_args = infcx.fresh_args_for_item(impl_span, drop_impl_def_id.to_def_id());
+    let fresh_impl_args = infcx.fresh_args_for_item(impl_span, impl_def_id.to_def_id());
     let fresh_adt_ty =
-        tcx.impl_trait_ref(drop_impl_def_id).unwrap().instantiate(tcx, fresh_impl_args).self_ty();
+        tcx.impl_trait_ref(impl_def_id).unwrap().instantiate(tcx, fresh_impl_args).self_ty();
 
     ocx.eq(&ObligationCause::dummy_with_span(impl_span), adt_env, fresh_adt_ty, impl_adt_ty)
-        .unwrap();
+        .expect("equating fully generic trait ref should never fail");
 
-    for (clause, span) in tcx.predicates_of(drop_impl_def_id).instantiate(tcx, fresh_impl_args) {
-        let normalize_cause = traits::ObligationCause::misc(span, adt_def_id);
+    for (clause, span) in tcx.predicates_of(impl_def_id).instantiate(tcx, fresh_impl_args) {
+        let normalize_cause = traits::ObligationCause::misc(span, impl_def_id);
         let pred = ocx.normalize(&normalize_cause, adt_env, clause);
-        let cause = traits::ObligationCause::new(span, adt_def_id, ObligationCauseCode::DropImpl);
+        let cause = traits::ObligationCause::new(
+            span,
+            impl_def_id,
+            ObligationCauseCode::AlwaysApplicableImpl,
+        );
         ocx.register_obligation(traits::Obligation::new(tcx, cause, adt_env, pred));
     }
 
@@ -173,13 +242,13 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
             let root_predicate = error.root_obligation.predicate;
             if root_predicates.insert(root_predicate) {
                 let item_span = tcx.def_span(adt_def_id);
-                let self_descr = tcx.def_descr(adt_def_id.to_def_id());
+                let self_descr = tcx.def_descr(adt_def_id);
                 guar = Some(
                     struct_span_code_err!(
                         tcx.dcx(),
                         error.root_obligation.cause.span,
                         E0367,
-                        "`Drop` impl requires `{root_predicate}` \
+                        "`{polarity}{trait_name}` impl requires `{root_predicate}` \
                         but the {self_descr} it is implemented for does not",
                     )
                     .with_span_note(item_span, "the implementor must specify the same requirement")
@@ -190,12 +259,12 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
         return Err(guar.unwrap());
     }
 
-    let errors = ocx.infcx.resolve_regions(adt_def_id, adt_env, []);
+    let errors = ocx.infcx.resolve_regions(impl_def_id, adt_env, []);
     if !errors.is_empty() {
         let mut guar = None;
         for error in errors {
             let item_span = tcx.def_span(adt_def_id);
-            let self_descr = tcx.def_descr(adt_def_id.to_def_id());
+            let self_descr = tcx.def_descr(adt_def_id);
             let outlives = match error {
                 RegionResolutionError::ConcreteFailure(_, a, b) => format!("{b}: {a}"),
                 RegionResolutionError::GenericBoundFailure(_, generic, r) => {
@@ -212,7 +281,7 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
                     tcx.dcx(),
                     error.origin().span(),
                     E0367,
-                    "`Drop` impl requires `{outlives}` \
+                    "`{polarity}{trait_name}` impl requires `{outlives}` \
                     but the {self_descr} it is implemented for does not",
                 )
                 .with_span_note(item_span, "the implementor must specify the same requirement")
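To make the renamed checks concrete, a small sketch of what they accept and reject for negative auto-trait impls. This is nightly-only; `negative_impls` is the assumed feature gate, and the error wording in the comments mirrors the E0366/E0367 messages reused above:

#![feature(negative_impls)]

struct Wrapper<T>(T);

// Accepted: always applicable. The impl covers every instantiation of
// `Wrapper` and adds no requirements beyond the type's own definition.
impl<T> !Send for Wrapper<T> {}

// The shapes the check rejects look like these (left as comments so the
// sketch compiles):
//
//   impl<T: Copy> !Send for Wrapper<T> {}  // extra `T: Copy` bound -> "impl requires ... but the struct it is implemented for does not"
//   impl<T> !Send for Wrapper<(T, T)> {}   // repeated parameter -> "impls cannot be specialized"

fn main() {
    println!("{}", Wrapper(7u8).0);
}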

View file

@@ -70,7 +70,6 @@ fn check_struct(tcx: TyCtxt<'_>, def_id: LocalDefId) {
     check_transparent(tcx, def);
     check_packed(tcx, span, def);
-    check_unsafe_fields(tcx, def_id);
 }
 
 fn check_union(tcx: TyCtxt<'_>, def_id: LocalDefId) {
@@ -144,36 +143,6 @@ fn check_union_fields(tcx: TyCtxt<'_>, span: Span, item_def_id: LocalDefId) -> b
     true
 }
 
-/// Check that the unsafe fields do not need dropping.
-fn check_unsafe_fields(tcx: TyCtxt<'_>, item_def_id: LocalDefId) {
-    let span = tcx.def_span(item_def_id);
-    let def = tcx.adt_def(item_def_id);
-
-    let typing_env = ty::TypingEnv::non_body_analysis(tcx, item_def_id);
-    let args = ty::GenericArgs::identity_for_item(tcx, item_def_id);
-
-    for field in def.all_fields() {
-        if !field.safety.is_unsafe() {
-            continue;
-        }
-
-        if !allowed_union_or_unsafe_field(tcx, field.ty(tcx, args), typing_env, span) {
-            let hir::Node::Field(field) = tcx.hir_node_by_def_id(field.did.expect_local()) else {
-                unreachable!("field has to correspond to hir field")
-            };
-            let ty_span = field.ty.span;
-            tcx.dcx().emit_err(errors::InvalidUnsafeField {
-                field_span: field.span,
-                sugg: errors::InvalidUnsafeFieldSuggestion {
-                    lo: ty_span.shrink_to_lo(),
-                    hi: ty_span.shrink_to_hi(),
-                },
-                note: (),
-            });
-        }
-    }
-}
-
 /// Check that a `static` is inhabited.
 fn check_static_inhabited(tcx: TyCtxt<'_>, def_id: LocalDefId) {
     // Make sure statics are inhabited.
@@ -1512,7 +1481,6 @@ fn check_enum(tcx: TyCtxt<'_>, def_id: LocalDefId) {
     detect_discriminant_duplicate(tcx, def);
     check_transparent(tcx, def);
-    check_unsafe_fields(tcx, def_id);
 }
 
 /// Part of enum check. Given the discriminants of an enum, errors if two or more discriminants are equal
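With this check removed, an unsafe field is no longer required to be `Copy` or wrapped in `ManuallyDrop`. A hedged sketch of the kind of definition that should now be accepted; this is nightly-only, and the `unsafe_fields` gate and exact surface syntax below are assumptions on my part, not taken from this commit:

#![feature(unsafe_fields)]
#![allow(incomplete_features)]

struct Connection {
    // A `String` has drop glue; before this change an unsafe field like this
    // was rejected unless it was `Copy` or wrapped in `ManuallyDrop`.
    unsafe token: String,
}

impl Connection {
    fn token_len(&self) -> usize {
        // Reading an unsafe field is an unsafe operation.
        unsafe { self.token.len() }
    }
}

fn main() {
    // Initializing an unsafe field is likewise an unsafe operation.
    let conn = unsafe { Connection { token: String::from("s3cr3t") } };
    println!("{}", conn.token_len());
}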

View file

@@ -62,9 +62,9 @@ a type parameter).
 
 */
 
+pub mod always_applicable;
 mod check;
 mod compare_impl_item;
-pub mod dropck;
 mod entry;
 pub mod intrinsic;
 pub mod intrinsicck;
@@ -113,11 +113,11 @@ pub fn provide(providers: &mut Providers) {
 }
 
 fn adt_destructor(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<ty::Destructor> {
-    tcx.calculate_dtor(def_id.to_def_id(), dropck::check_drop_impl)
+    tcx.calculate_dtor(def_id.to_def_id(), always_applicable::check_drop_impl)
 }
 
 fn adt_async_destructor(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<ty::AsyncDestructor> {
-    tcx.calculate_async_dtor(def_id.to_def_id(), dropck::check_drop_impl)
+    tcx.calculate_async_dtor(def_id.to_def_id(), always_applicable::check_drop_impl)
 }
 
 /// Given a `DefId` for an opaque type in return position, find its parent item's return

View file

@@ -16,6 +16,7 @@ use rustc_span::{ErrorGuaranteed, sym};
 use rustc_type_ir::elaborate;
 use tracing::debug;
 
+use crate::check::always_applicable;
 use crate::errors;
 
 mod builtin;
@@ -24,11 +25,12 @@ mod inherent_impls_overlap;
 mod orphan;
 mod unsafety;
 
-fn check_impl(
-    tcx: TyCtxt<'_>,
+fn check_impl<'tcx>(
+    tcx: TyCtxt<'tcx>,
     impl_def_id: LocalDefId,
-    trait_ref: ty::TraitRef<'_>,
-    trait_def: &ty::TraitDef,
+    trait_ref: ty::TraitRef<'tcx>,
+    trait_def: &'tcx ty::TraitDef,
+    polarity: ty::ImplPolarity,
 ) -> Result<(), ErrorGuaranteed> {
     debug!(
         "(checking implementation) adding impl for trait '{:?}', item '{}'",
@@ -44,6 +46,12 @@ fn check_impl(
     enforce_trait_manually_implementable(tcx, impl_def_id, trait_ref.def_id, trait_def)
         .and(enforce_empty_impls_for_marker_traits(tcx, impl_def_id, trait_ref.def_id, trait_def))
+        .and(always_applicable::check_negative_auto_trait_impl(
+            tcx,
+            impl_def_id,
+            trait_ref,
+            polarity,
+        ))
 }
 
 fn enforce_trait_manually_implementable(
@@ -154,16 +162,16 @@ fn coherent_trait(tcx: TyCtxt<'_>, def_id: DefId) -> Result<(), ErrorGuaranteed>
     let mut res = tcx.ensure_ok().specialization_graph_of(def_id);
 
     for &impl_def_id in impls {
-        let trait_header = tcx.impl_trait_header(impl_def_id).unwrap();
-        let trait_ref = trait_header.trait_ref.instantiate_identity();
+        let impl_header = tcx.impl_trait_header(impl_def_id).unwrap();
+        let trait_ref = impl_header.trait_ref.instantiate_identity();
         let trait_def = tcx.trait_def(trait_ref.def_id);
 
         res = res
-            .and(check_impl(tcx, impl_def_id, trait_ref, trait_def))
+            .and(check_impl(tcx, impl_def_id, trait_ref, trait_def, impl_header.polarity))
             .and(check_object_overlap(tcx, impl_def_id, trait_ref))
-            .and(unsafety::check_item(tcx, impl_def_id, trait_header, trait_def))
+            .and(unsafety::check_item(tcx, impl_def_id, impl_header, trait_def))
             .and(tcx.ensure_ok().orphan_check_impl(impl_def_id))
-            .and(builtin::check_trait(tcx, def_id, impl_def_id, trait_header));
+            .and(builtin::check_trait(tcx, def_id, impl_def_id, impl_header));
     }
 
     res

View file

@@ -11,8 +11,6 @@ use rustc_middle::ty::Ty;
 use rustc_span::{Ident, Span, Symbol};
 
 use crate::fluent_generated as fluent;
 
-mod pattern_types;
-pub(crate) use pattern_types::*;
 pub(crate) mod wrong_number_of_generic_args;
 
 mod precise_captures;
@@ -84,6 +82,8 @@ pub(crate) struct AssocItemNotFound<'a> {
     pub label: Option<AssocItemNotFoundLabel<'a>>,
     #[subdiagnostic]
     pub sugg: Option<AssocItemNotFoundSugg<'a>>,
+    #[label(hir_analysis_within_macro)]
+    pub within_macro_span: Option<Span>,
 }
 
 #[derive(Subdiagnostic)]
@@ -710,17 +710,6 @@ pub(crate) struct InvalidUnionField {
     pub note: (),
 }
 
-#[derive(Diagnostic)]
-#[diag(hir_analysis_invalid_unsafe_field, code = E0740)]
-pub(crate) struct InvalidUnsafeField {
-    #[primary_span]
-    pub field_span: Span,
-    #[subdiagnostic]
-    pub sugg: InvalidUnsafeFieldSuggestion,
-    #[note]
-    pub note: (),
-}
-
 #[derive(Diagnostic)]
 #[diag(hir_analysis_return_type_notation_on_non_rpitit)]
 pub(crate) struct ReturnTypeNotationOnNonRpitit<'tcx> {
@@ -742,18 +731,6 @@ pub(crate) struct InvalidUnionFieldSuggestion {
     pub hi: Span,
 }
 
-#[derive(Subdiagnostic)]
-#[multipart_suggestion(
-    hir_analysis_invalid_unsafe_field_sugg,
-    applicability = "machine-applicable"
-)]
-pub(crate) struct InvalidUnsafeFieldSuggestion {
-    #[suggestion_part(code = "std::mem::ManuallyDrop<")]
-    pub lo: Span,
-    #[suggestion_part(code = ">")]
-    pub hi: Span,
-}
-
 #[derive(Diagnostic)]
 #[diag(hir_analysis_return_type_notation_equality_bound)]
 pub(crate) struct ReturnTypeNotationEqualityBound {

View file

@@ -1,14 +0,0 @@
-use rustc_macros::Diagnostic;
-use rustc_middle::ty::Ty;
-use rustc_span::Span;
-
-#[derive(Diagnostic)]
-#[diag(hir_analysis_invalid_base_type)]
-pub(crate) struct InvalidBaseType<'tcx> {
-    pub ty: Ty<'tcx>,
-    #[primary_span]
-    pub ty_span: Span,
-    pub pat: &'static str,
-    #[note]
-    pub pat_span: Span,
-}

View file

@@ -151,6 +151,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
             qself: &qself_str,
             label: None,
             sugg: None,
+            // Try to get the span of the identifier within the path's syntax context
+            // (if that's different).
+            within_macro_span: assoc_name.span.within_macro(span, tcx.sess.source_map()),
         };
 
         if is_dummy {

View file

@@ -55,9 +55,7 @@ use tracing::{debug, instrument};
 
 use self::errors::assoc_kind_str;
 use crate::check::check_abi_fn_ptr;
-use crate::errors::{
-    AmbiguousLifetimeBound, BadReturnTypeNotation, InvalidBaseType, NoVariantNamed,
-};
+use crate::errors::{AmbiguousLifetimeBound, BadReturnTypeNotation, NoVariantNamed};
 use crate::hir_ty_lowering::errors::{GenericsArgsErrExtend, prohibit_assoc_item_constraint};
 use crate::hir_ty_lowering::generics::{check_generic_arg_count, lower_generic_args};
 use crate::middle::resolve_bound_vars as rbv;
@@ -2692,28 +2690,26 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
                 let ty_span = ty.span;
                 let ty = self.lower_ty(ty);
                 let pat_ty = match pat.kind {
-                    hir::TyPatKind::Range(start, end, include_end) => {
-                        let ty = match ty.kind() {
-                            ty::Int(_) | ty::Uint(_) | ty::Char => ty,
-                            _ => Ty::new_error(
-                                tcx,
-                                self.dcx().emit_err(InvalidBaseType {
-                                    ty,
-                                    ty_span,
-                                    pat: "range",
-                                    pat_span: pat.span,
-                                }),
-                            ),
-                        };
-                        let start = start.map(|expr| self.lower_const_arg(expr, FeedConstTy::No));
-                        let end = end.map(|expr| self.lower_const_arg(expr, FeedConstTy::No));
-                        let include_end = match include_end {
-                            hir::RangeEnd::Included => true,
-                            hir::RangeEnd::Excluded => false,
-                        };
-                        let pat = tcx.mk_pat(ty::PatternKind::Range { start, end, include_end });
+                    hir::TyPatKind::Range(start, end) => {
+                        let (ty, start, end) = match ty.kind() {
+                            // Keep this list of types in sync with the list of types that
+                            // the `RangePattern` trait is implemented for.
+                            ty::Int(_) | ty::Uint(_) | ty::Char => {
+                                let start = self.lower_const_arg(start, FeedConstTy::No);
+                                let end = self.lower_const_arg(end, FeedConstTy::No);
+                                (ty, start, end)
+                            }
+                            _ => {
+                                let guar = self.dcx().span_delayed_bug(
+                                    ty_span,
+                                    "invalid base type for range pattern",
+                                );
+                                let errc = ty::Const::new_error(tcx, guar);
+                                (Ty::new_error(tcx, guar), errc, errc)
+                            }
+                        };
+
+                        let pat = tcx.mk_pat(ty::PatternKind::Range { start, end });
                         Ty::new_pat(tcx, ty, pat)
                     }
                     hir::TyPatKind::Err(e) => Ty::new_error(tcx, e),

View file

@@ -252,15 +252,11 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
             ty::Pat(typ, pat) => {
                 match *pat {
-                    ty::PatternKind::Range { start, end, include_end: _ } => {
-                        if let Some(start) = start {
-                            self.add_constraints_from_const(current, start, variance);
-                        }
-                        if let Some(end) = end {
-                            self.add_constraints_from_const(current, end, variance);
-                        }
+                    ty::PatternKind::Range { start, end } => {
+                        self.add_constraints_from_const(current, start, variance);
+                        self.add_constraints_from_const(current, end, variance);
                     }
                 }
                 self.add_constraints_from_ty(current, typ, variance);
             }

View file

@@ -1943,17 +1943,10 @@ impl<'a> State<'a> {
         // Pat isn't normalized, but the beauty of it
         // is that it doesn't matter
         match pat.kind {
-            TyPatKind::Range(begin, end, end_kind) => {
-                if let Some(expr) = begin {
-                    self.print_const_arg(expr);
-                }
-                match end_kind {
-                    RangeEnd::Included => self.word("..."),
-                    RangeEnd::Excluded => self.word(".."),
-                }
-                if let Some(expr) = end {
-                    self.print_const_arg(expr);
-                }
+            TyPatKind::Range(begin, end) => {
+                self.print_const_arg(begin);
+                self.word("..=");
+                self.print_const_arg(end);
             }
             TyPatKind::Err(_) => {
                 self.popen();

View file

@@ -3069,7 +3069,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             "ban_nonexisting_field: field={:?}, base={:?}, expr={:?}, base_ty={:?}",
             ident, base, expr, base_ty
         );
-        let mut err = self.no_such_field_err(ident, base_ty, base.hir_id);
+        let mut err = self.no_such_field_err(ident, base_ty, expr);
 
         match *base_ty.peel_refs().kind() {
             ty::Array(_, len) => {
@@ -3282,18 +3282,27 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         );
     }
 
-    fn no_such_field_err(&self, field: Ident, expr_t: Ty<'tcx>, id: HirId) -> Diag<'_> {
+    fn no_such_field_err(
+        &self,
+        field: Ident,
+        base_ty: Ty<'tcx>,
+        expr: &hir::Expr<'tcx>,
+    ) -> Diag<'_> {
         let span = field.span;
-        debug!("no_such_field_err(span: {:?}, field: {:?}, expr_t: {:?})", span, field, expr_t);
+        debug!("no_such_field_err(span: {:?}, field: {:?}, expr_t: {:?})", span, field, base_ty);
 
-        let mut err = self.dcx().create_err(NoFieldOnType { span, ty: expr_t, field });
+        let mut err = self.dcx().create_err(NoFieldOnType { span, ty: base_ty, field });
 
-        if expr_t.references_error() {
+        if base_ty.references_error() {
             err.downgrade_to_delayed_bug();
         }
 
+        if let Some(within_macro_span) = span.within_macro(expr.span, self.tcx.sess.source_map()) {
+            err.span_label(within_macro_span, "due to this macro variable");
+        }
+
         // try to add a suggestion in case the field is a nested field of a field of the Adt
-        let mod_id = self.tcx.parent_module(id).to_def_id();
-        let (ty, unwrap) = if let ty::Adt(def, args) = expr_t.kind()
+        let mod_id = self.tcx.parent_module(expr.hir_id).to_def_id();
+        let (ty, unwrap) = if let ty::Adt(def, args) = base_ty.kind()
             && (self.tcx.is_diagnostic_item(sym::Result, def.did())
                 || self.tcx.is_diagnostic_item(sym::Option, def.did()))
             && let Some(arg) = args.get(0)
@@ -3301,10 +3310,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         {
             (ty, "unwrap().")
         } else {
-            (expr_t, "")
+            (base_ty, "")
         };
 
         for (found_fields, args) in
-            self.get_field_candidates_considering_privacy_for_diag(span, ty, mod_id, id)
+            self.get_field_candidates_considering_privacy_for_diag(span, ty, mod_id, expr.hir_id)
         {
             let field_names = found_fields.iter().map(|field| field.name).collect::<Vec<_>>();
             let mut candidate_fields: Vec<_> = found_fields
@@ -3317,7 +3326,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                         args,
                         vec![],
                         mod_id,
-                        id,
+                        expr.hir_id,
                     )
                 })
                 .map(|mut field_path| {
@@ -3328,7 +3337,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             candidate_fields.sort();
 
             let len = candidate_fields.len();
-            if len > 0 {
+            // Don't suggest `.field` if the base expr is from a different
+            // syntax context than the field.
+            if len > 0 && expr.span.eq_ctxt(field.span) {
                 err.span_suggestions(
                     field.span.shrink_to_lo(),
                     format!(
@@ -3963,7 +3974,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 _ => (),
             };
 
-            self.no_such_field_err(field, container, expr.hir_id).emit();
+            self.no_such_field_err(field, container, expr).emit();
 
             break;
         }
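The scenario the new `within_macro_span` label targets: the offending identifier only exists inside a macro expansion, so the diagnostic now also points at the macro variable that produced it. A compiling sketch of such a macro (the bogus field access is described in a comment so the example builds):

macro_rules! get_field {
    ($e:expr) => {
        // If this expanded to a field that doesn't exist on `$e`'s type, the
        // resulting field error would now carry an extra "due to this macro
        // variable" label pointing at `$e`.
        $e.num_legs
    };
}

struct Dog {
    num_legs: u32,
}

fn main() {
    let dog = Dog { num_legs: 4 };
    assert_eq!(get_field!(dog), 4);
}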

View file

@@ -669,12 +669,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         if !errors.is_empty() {
             self.adjust_fulfillment_errors_for_expr_obligation(&mut errors);
-            let errors_causecode = errors
-                .iter()
-                .map(|e| (e.obligation.cause.span, e.root_obligation.cause.code().clone()))
-                .collect::<Vec<_>>();
             self.err_ctxt().report_fulfillment_errors(errors);
-            self.collect_unused_stmts_for_coerce_return_ty(errors_causecode);
         }
     }

View file

@@ -3,9 +3,7 @@ use std::{fmt, iter, mem};
 use itertools::Itertools;
 use rustc_data_structures::fx::FxIndexSet;
 use rustc_errors::codes::*;
-use rustc_errors::{
-    Applicability, Diag, ErrorGuaranteed, MultiSpan, StashKey, a_or_an, listify, pluralize,
-};
+use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan, a_or_an, listify, pluralize};
 use rustc_hir::def::{CtorOf, DefKind, Res};
 use rustc_hir::def_id::DefId;
 use rustc_hir::intravisit::Visitor;
@@ -2193,62 +2191,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         }
     }
 
-    pub(super) fn collect_unused_stmts_for_coerce_return_ty(
-        &self,
-        errors_causecode: Vec<(Span, ObligationCauseCode<'tcx>)>,
-    ) {
-        for (span, code) in errors_causecode {
-            self.dcx().try_steal_modify_and_emit_err(span, StashKey::MaybeForgetReturn, |err| {
-                if let Some(fn_sig) = self.body_fn_sig()
-                    && let ObligationCauseCode::WhereClauseInExpr(_, _, binding_hir_id, ..) = code
-                    && !fn_sig.output().is_unit()
-                {
-                    let mut block_num = 0;
-                    let mut found_semi = false;
-                    for (hir_id, node) in self.tcx.hir_parent_iter(binding_hir_id) {
-                        // Don't proceed into parent bodies
-                        if hir_id.owner != binding_hir_id.owner {
-                            break;
-                        }
-                        match node {
-                            hir::Node::Stmt(stmt) => {
-                                if let hir::StmtKind::Semi(expr) = stmt.kind {
-                                    let expr_ty = self.typeck_results.borrow().expr_ty(expr);
-                                    let return_ty = fn_sig.output();
-                                    if !matches!(expr.kind, hir::ExprKind::Ret(..))
-                                        && self.may_coerce(expr_ty, return_ty)
-                                    {
-                                        found_semi = true;
-                                    }
-                                }
-                            }
-                            hir::Node::Block(_block) => {
-                                if found_semi {
-                                    block_num += 1;
-                                }
-                            }
-                            hir::Node::Item(item) => {
-                                if let hir::ItemKind::Fn { .. } = item.kind {
-                                    break;
-                                }
-                            }
-                            _ => {}
-                        }
-                    }
-                    if block_num > 1 && found_semi {
-                        err.span_suggestion_verbose(
-                            // use the span of the *whole* expr
-                            self.tcx.hir().span(binding_hir_id).shrink_to_lo(),
-                            "you might have meant to return this to infer its type parameters",
-                            "return ",
-                            Applicability::MaybeIncorrect,
-                        );
-                    }
-                }
-            });
-        }
-    }
-
     /// Given a vector of fulfillment errors, try to adjust the spans of the
     /// errors to more accurately point at the cause of the failure.
     ///

View file

@@ -158,7 +158,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             self.typeck_results.borrow_mut().used_trait_imports.insert(import_id);
         }
-        let (span, sugg_span, source, item_name, args) = match self.tcx.hir_node(call_id) {
+        let (span, expr_span, source, item_name, args) = match self.tcx.hir_node(call_id) {
             hir::Node::Expr(&hir::Expr {
                 kind: hir::ExprKind::MethodCall(segment, rcvr, args, _),
                 span,
@@ -194,6 +194,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             node => unreachable!("{node:?}"),
         };
+        // Try to get the span of the identifier within the expression's syntax context
+        // (if that's different).
+        let within_macro_span = span.within_macro(expr_span, self.tcx.sess.source_map());
         // Avoid suggestions when we don't know what's going on.
         if let Err(guar) = rcvr_ty.error_reported() {
             return guar;
@@ -207,10 +211,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 call_id,
                 source,
                 args,
-                sugg_span,
+                expr_span,
                 &mut no_match_data,
                 expected,
                 trait_missing_method,
+                within_macro_span,
             ),
             MethodError::Ambiguity(mut sources) => {
@@ -221,6 +226,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     "multiple applicable items in scope"
                 );
                 err.span_label(item_name.span, format!("multiple `{item_name}` found"));
+                if let Some(within_macro_span) = within_macro_span {
+                    err.span_label(within_macro_span, "due to this macro variable");
+                }
                 self.note_candidates_on_method_error(
                     rcvr_ty,
@@ -230,7 +238,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     span,
                     &mut err,
                     &mut sources,
-                    Some(sugg_span),
+                    Some(expr_span),
                 );
                 err.emit()
             }
@@ -252,6 +260,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     .span_if_local(def_id)
                     .unwrap_or_else(|| self.tcx.def_span(def_id));
                 err.span_label(sp, format!("private {kind} defined here"));
+                if let Some(within_macro_span) = within_macro_span {
+                    err.span_label(within_macro_span, "due to this macro variable");
+                }
                 self.suggest_valid_traits(&mut err, item_name, out_of_scope_traits, true);
                 err.emit()
             }
@@ -268,6 +279,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 if !needs_mut {
                     err.span_label(bound_span, "this has a `Sized` requirement");
                 }
+                if let Some(within_macro_span) = within_macro_span {
+                    err.span_label(within_macro_span, "due to this macro variable");
+                }
                 if !candidates.is_empty() {
                     let help = format!(
                         "{an}other candidate{s} {were} found in the following trait{s}",
@@ -581,6 +595,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         no_match_data: &mut NoMatchData<'tcx>,
         expected: Expectation<'tcx>,
         trait_missing_method: bool,
+        within_macro_span: Option<Span>,
    ) -> ErrorGuaranteed {
        let mode = no_match_data.mode;
        let tcx = self.tcx;
@@ -721,6 +736,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        if tcx.sess.source_map().is_multiline(sugg_span) {
            err.span_label(sugg_span.with_hi(span.lo()), "");
        }
+        if let Some(within_macro_span) = within_macro_span {
+            err.span_label(within_macro_span, "due to this macro variable");
+        }
        if short_ty_str.len() < ty_str.len() && ty_str.len() > 10 {
            ty_str = short_ty_str;

View file

@@ -882,27 +882,13 @@ fn ty_is_known_nonnull<'tcx>(
             || Option::unwrap_or_default(
                 try {
                     match **pat {
-                        ty::PatternKind::Range { start, end, include_end } => {
-                            match (start, end) {
-                                (Some(start), None) => {
-                                    start.try_to_value()?.try_to_bits(tcx, typing_env)? > 0
-                                }
-                                (Some(start), Some(end)) => {
-                                    let start =
-                                        start.try_to_value()?.try_to_bits(tcx, typing_env)?;
-                                    let end =
-                                        end.try_to_value()?.try_to_bits(tcx, typing_env)?;
-                                    if include_end {
-                                        // This also works for negative numbers, as we just need
-                                        // to ensure we aren't wrapping over zero.
-                                        start > 0 && end >= start
-                                    } else {
-                                        start > 0 && end > start
-                                    }
-                                }
-                                _ => false,
-                            }
+                        ty::PatternKind::Range { start, end } => {
+                            let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
+                            let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;
+                            // This also works for negative numbers, as we just need
+                            // to ensure we aren't wrapping over zero.
+                            start > 0 && end >= start
                         }
                     }
                 },
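The rewritten check above accepts a pattern range only when its start is above zero, i.e. when the pattern type can never hold a zero value and is therefore known non-null. A minimal, stable-Rust analogy of why that matters, using `NonZeroU32` instead of the unstable pattern-type machinery, purely as an illustration:

    use std::num::NonZeroU32;

    fn main() {
        // Because 0 is excluded from the valid range, the compiler can use it
        // as the `None` representation, so the `Option` costs no extra space.
        assert_eq!(
            std::mem::size_of::<Option<NonZeroU32>>(),
            std::mem::size_of::<NonZeroU32>(),
        );
    }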

View file

@@ -470,7 +470,7 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
         // Find the provenance.
         let (offset, _prov) = self
             .provenance
-            .range_get_ptrs(range, cx)
+            .range_ptrs_get(range, cx)
             .first()
             .copied()
             .expect("there must be provenance somewhere here");

View file

@@ -67,7 +67,7 @@ impl ProvenanceMap {
 }
 impl<Prov: Provenance> ProvenanceMap<Prov> {
-    fn adjusted_range(range: AllocRange, cx: &impl HasDataLayout) -> Range<Size> {
+    fn adjusted_range_ptrs(range: AllocRange, cx: &impl HasDataLayout) -> Range<Size> {
         // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
         // the beginning of this range.
         let adjusted_start = Size::from_bytes(
@@ -79,26 +79,21 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
     /// Returns all ptr-sized provenance in the given range.
     /// If the range has length 0, returns provenance that crosses the edge between `start-1` and
     /// `start`.
-    pub(super) fn range_get_ptrs(
+    pub(super) fn range_ptrs_get(
         &self,
         range: AllocRange,
         cx: &impl HasDataLayout,
     ) -> &[(Size, Prov)] {
-        self.ptrs.range(Self::adjusted_range(range, cx))
+        self.ptrs.range(Self::adjusted_range_ptrs(range, cx))
     }
-    /// `pm.range_get_ptrs_is_empty(r, cx)` == `pm.range_get_ptrs(r, cx).is_empty()`, but is
-    /// faster.
-    pub(super) fn range_get_ptrs_is_empty(
-        &self,
-        range: AllocRange,
-        cx: &impl HasDataLayout,
-    ) -> bool {
-        self.ptrs.range_is_empty(Self::adjusted_range(range, cx))
+    /// `pm.range_ptrs_is_empty(r, cx)` == `pm.range_ptrs_get(r, cx).is_empty()`, but is faster.
+    pub(super) fn range_ptrs_is_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
+        self.ptrs.range_is_empty(Self::adjusted_range_ptrs(range, cx))
     }
     /// Returns all byte-wise provenance in the given range.
-    fn range_get_bytes(&self, range: AllocRange) -> &[(Size, Prov)] {
+    fn range_bytes_get(&self, range: AllocRange) -> &[(Size, Prov)] {
         if let Some(bytes) = self.bytes.as_ref() {
             bytes.range(range.start..range.end())
         } else {
@@ -106,9 +101,14 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
         }
     }
+    /// Same as `range_bytes_get(range).is_empty()`, but faster.
+    fn range_bytes_is_empty(&self, range: AllocRange) -> bool {
+        self.bytes.as_ref().is_none_or(|bytes| bytes.range_is_empty(range.start..range.end()))
+    }
     /// Get the provenance of a single byte.
     pub fn get(&self, offset: Size, cx: &impl HasDataLayout) -> Option<Prov> {
-        let prov = self.range_get_ptrs(alloc_range(offset, Size::from_bytes(1)), cx);
+        let prov = self.range_ptrs_get(alloc_range(offset, Size::from_bytes(1)), cx);
         debug_assert!(prov.len() <= 1);
         if let Some(entry) = prov.first() {
             // If it overlaps with this byte, it is on this byte.
@@ -132,7 +132,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
     /// limit access to provenance outside of the `Allocation` abstraction.
     ///
     pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
-        self.range_get_ptrs_is_empty(range, cx) && self.range_get_bytes(range).is_empty()
+        self.range_ptrs_is_empty(range, cx) && self.range_bytes_is_empty(range)
     }
     /// Yields all the provenances stored in this map.
@@ -164,14 +164,14 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
         // provenance that overlaps with the given range.
         let (first, last) = {
             // Find all provenance overlapping the given range.
-            if self.range_get_ptrs_is_empty(range, cx) {
+            if self.range_ptrs_is_empty(range, cx) {
                 // No provenance in this range, we are done. This is the common case.
                 return Ok(());
             }
-            // This redoes some of the work of `range_get_ptrs_is_empty`, but this path is much
+            // This redoes some of the work of `range_ptrs_is_empty`, but this path is much
             // colder than the early return above, so it's worth it.
-            let provenance = self.range_get_ptrs(range, cx);
+            let provenance = self.range_ptrs_get(range, cx);
             (
                 provenance.first().unwrap().0,
                 provenance.last().unwrap().0 + cx.data_layout().pointer_size,
@@ -284,8 +284,8 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
         // This includes the existing bytewise provenance in the range, and ptr provenance
         // that overlaps with the begin/end of the range.
         let mut dest_bytes_box = None;
-        let begin_overlap = self.range_get_ptrs(alloc_range(src.start, Size::ZERO), cx).first();
-        let end_overlap = self.range_get_ptrs(alloc_range(src.end(), Size::ZERO), cx).first();
+        let begin_overlap = self.range_ptrs_get(alloc_range(src.start, Size::ZERO), cx).first();
+        let end_overlap = self.range_ptrs_get(alloc_range(src.end(), Size::ZERO), cx).first();
         if !Prov::OFFSET_IS_ADDR {
             // There can't be any bytewise provenance, and we cannot split up the begin/end overlap.
             if let Some(entry) = begin_overlap {
@@ -308,10 +308,10 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
         } else {
             trace!("no start overlapping entry");
         }
         // Then the main part, bytewise provenance from `self.bytes`.
-        if let Some(all_bytes) = self.bytes.as_ref() {
-            bytes.extend(all_bytes.range(src.start..src.end()));
-        }
+        bytes.extend(self.range_bytes_get(src));
         // And finally possibly parts of a pointer at the end.
         if let Some(entry) = end_overlap {
             trace!("end overlapping entry: {entry:?}");
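The new `range_bytes_is_empty` helper above leans on `Option::is_none_or` from the standard library. A tiny self-contained sketch of that method's semantics, mirroring the "no byte-wise map, or an empty range in it" shape (the variable names are invented for illustration):

    fn main() {
        let no_bytes: Option<Vec<u8>> = None;
        let some_bytes: Option<Vec<u8>> = Some(vec![1, 2, 3]);

        // `is_none_or` is true when the option is `None`, or when the closure
        // accepts the contained value.
        assert!(no_bytes.is_none_or(|b| b.is_empty()));
        assert!(!some_bytes.is_none_or(|b| b.is_empty()));
    }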

View file

@@ -19,7 +19,7 @@ use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_hir::{BindingMode, ByRef, HirId, MatchSource, RangeEnd};
 use rustc_index::{IndexVec, newtype_index};
-use rustc_macros::{HashStable, TypeVisitable};
+use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeVisitable};
 use rustc_span::def_id::LocalDefId;
 use rustc_span::{ErrorGuaranteed, Span, Symbol};
 use rustc_target::asm::InlineAsmRegOrRegClass;
@@ -49,10 +49,13 @@ macro_rules! thir_with_elements {
         }
     )*
+// Note: Making `Thir` implement `Clone` is useful for external tools that need access to
+// THIR bodies even after the `Steal` query result has been stolen.
+// One such tool is https://github.com/rust-corpus/qrates/.
 /// A container for a THIR body.
 ///
 /// This can be indexed directly by any THIR index (e.g. [`ExprId`]).
-#[derive(Debug, HashStable)]
+#[derive(Debug, HashStable, Clone)]
 pub struct Thir<'tcx> {
     pub body_type: BodyTy<'tcx>,
     $(
@@ -90,7 +93,7 @@ thir_with_elements! {
     params: ParamId => Param<'tcx> => "p{}",
 }
-#[derive(Debug, HashStable)]
+#[derive(Debug, HashStable, Clone)]
 pub enum BodyTy<'tcx> {
     Const(Ty<'tcx>),
     Fn(FnSig<'tcx>),
@@ -98,7 +101,7 @@ pub enum BodyTy<'tcx> {
 }
 /// Description of a type-checked function parameter.
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct Param<'tcx> {
     /// The pattern that appears in the parameter list, or None for implicit parameters.
     pub pat: Option<Box<Pat<'tcx>>>,
@@ -118,7 +121,7 @@ pub enum LintLevel {
     Explicit(HirId),
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct Block {
     /// Whether the block itself has a label. Used by `label: {}`
     /// and `try` blocks.
@@ -138,7 +141,7 @@ pub struct Block {
 type UserTy<'tcx> = Option<Box<CanonicalUserType<'tcx>>>;
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct AdtExpr<'tcx> {
     /// The ADT we're constructing.
     pub adt_def: AdtDef<'tcx>,
@@ -155,7 +158,7 @@ pub struct AdtExpr<'tcx> {
     pub base: AdtExprBase<'tcx>,
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub enum AdtExprBase<'tcx> {
     /// A struct expression where all the fields are explicitly enumerated: `Foo { a, b }`.
     None,
@@ -168,7 +171,7 @@ pub enum AdtExprBase<'tcx> {
     DefaultFields(Box<[Ty<'tcx>]>),
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct ClosureExpr<'tcx> {
     pub closure_id: LocalDefId,
     pub args: UpvarArgs<'tcx>,
@@ -177,7 +180,7 @@ pub struct ClosureExpr<'tcx> {
     pub fake_reads: Vec<(ExprId, FakeReadCause, HirId)>,
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct InlineAsmExpr<'tcx> {
     pub asm_macro: AsmMacro,
     pub template: &'tcx [InlineAsmTemplatePiece],
@@ -195,12 +198,12 @@ pub enum BlockSafety {
     ExplicitUnsafe(HirId),
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct Stmt<'tcx> {
     pub kind: StmtKind<'tcx>,
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub enum StmtKind<'tcx> {
     /// An expression with a trailing semicolon.
     Expr {
@@ -240,11 +243,11 @@ pub enum StmtKind<'tcx> {
     },
 }
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, HashStable)]
+#[derive(Clone, Debug, Copy, PartialEq, Eq, Hash, HashStable, TyEncodable, TyDecodable)]
 pub struct LocalVarId(pub HirId);
 /// A THIR expression.
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct Expr<'tcx> {
     /// kind of expression
     pub kind: ExprKind<'tcx>,
@@ -271,7 +274,7 @@ pub struct TempLifetime {
     pub backwards_incompatible: Option<region::Scope>,
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub enum ExprKind<'tcx> {
     /// `Scope`s are used to explicitly mark destruction scopes,
     /// and to track the `HirId` of the expressions within the scope.
@@ -548,20 +551,20 @@ pub enum ExprKind<'tcx> {
 /// Represents the association of a field identifier and an expression.
 ///
 /// This is used in struct constructors.
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct FieldExpr {
     pub name: FieldIdx,
     pub expr: ExprId,
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct FruInfo<'tcx> {
     pub base: ExprId,
     pub field_types: Box<[Ty<'tcx>]>,
 }
 /// A `match` arm.
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub struct Arm<'tcx> {
     pub pattern: Box<Pat<'tcx>>,
     pub guard: Option<ExprId>,
@@ -579,7 +582,7 @@ pub enum LogicalOp {
     Or,
 }
-#[derive(Debug, HashStable)]
+#[derive(Clone, Debug, HashStable)]
 pub enum InlineAsmOperand<'tcx> {
     In {
         reg: InlineAsmRegOrRegClass,
@@ -616,13 +619,13 @@ pub enum InlineAsmOperand<'tcx> {
     },
 }
-#[derive(Debug, HashStable, TypeVisitable)]
+#[derive(Clone, Debug, HashStable, TypeVisitable)]
 pub struct FieldPat<'tcx> {
     pub field: FieldIdx,
     pub pattern: Pat<'tcx>,
 }
-#[derive(Debug, HashStable, TypeVisitable)]
+#[derive(Clone, Debug, HashStable, TypeVisitable)]
 pub struct Pat<'tcx> {
     pub ty: Ty<'tcx>,
     pub span: Span,
@@ -729,7 +732,7 @@ impl<'tcx> Pat<'tcx> {
     }
 }
-#[derive(Debug, HashStable, TypeVisitable)]
+#[derive(Clone, Debug, HashStable, TypeVisitable)]
 pub struct Ascription<'tcx> {
     pub annotation: CanonicalUserTypeAnnotation<'tcx>,
     /// Variance to use when relating the `user_ty` to the **type of the value being
@@ -753,7 +756,7 @@ pub struct Ascription<'tcx> {
     pub variance: ty::Variance,
 }
-#[derive(Debug, HashStable, TypeVisitable)]
+#[derive(Clone, Debug, HashStable, TypeVisitable)]
 pub enum PatKind<'tcx> {
     /// A wildcard pattern: `_`.
     Wild,

View file

@@ -397,9 +397,9 @@ pub enum ObligationCauseCode<'tcx> {
     RustCall,
-    /// Obligations to prove that a `std::ops::Drop` impl is not stronger than
+    /// Obligations to prove that a `Drop` or negative auto trait impl is not stronger than
     /// the ADT it's being implemented for.
-    DropImpl,
+    AlwaysApplicableImpl,
     /// Requirement for a `const N: Ty` to implement `Ty: ConstParamTy`
     ConstParam(Ty<'tcx>),

View file

@@ -220,13 +220,9 @@ impl FlagComputation {
             &ty::Pat(ty, pat) => {
                 self.add_ty(ty);
                 match *pat {
-                    ty::PatternKind::Range { start, end, include_end: _ } => {
-                        if let Some(start) = start {
-                            self.add_const(start)
-                        }
-                        if let Some(end) = end {
-                            self.add_const(end)
-                        }
+                    ty::PatternKind::Range { start, end } => {
+                        self.add_const(start);
+                        self.add_const(end);
                     }
                 }
             }

View file

@@ -236,6 +236,11 @@ impl<'tcx> InhabitedPredicate<'tcx> {
         self.instantiate_opt(tcx, args).unwrap_or(self)
     }
+    /// Same as [`Self::instantiate`], but if there is no generics to
+    /// instantiate, returns `None`. This is useful because it lets us avoid
+    /// allocating a recursive copy of everything when the result is unchanged.
+    ///
+    /// Only used to implement `instantiate` itself.
     fn instantiate_opt(self, tcx: TyCtxt<'tcx>, args: ty::GenericArgsRef<'tcx>) -> Option<Self> {
         match self {
             Self::ConstIsZero(c) => {
@@ -260,7 +265,10 @@ impl<'tcx> InhabitedPredicate<'tcx> {
                 Some(InhabitedPredicate::True) => Some(InhabitedPredicate::True),
                 Some(a) => Some(a.or(tcx, b.instantiate_opt(tcx, args).unwrap_or(b))),
             },
-            _ => None,
+            Self::True | Self::False | Self::NotInModule(_) => None,
+            Self::OpaqueType(_) => {
+                bug!("unexpected OpaqueType in InhabitedPredicate");
+            }
         }
     }
 }

View file

@@ -26,18 +26,30 @@ impl<'tcx> fmt::Debug for Pattern<'tcx> {
 impl<'tcx> fmt::Debug for PatternKind<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
-            PatternKind::Range { start, end, include_end } => {
-                if let Some(start) = start {
-                    write!(f, "{start}")?;
-                }
-                write!(f, "..")?;
-                if include_end {
-                    write!(f, "=")?;
-                }
-                if let Some(end) = end {
-                    write!(f, "{end}")?;
-                }
-                Ok(())
-            }
+            PatternKind::Range { start, end } => {
+                write!(f, "{start}")?;
+                if let Some(c) = end.try_to_value() {
+                    let end = c.valtree.unwrap_leaf();
+                    let size = end.size();
+                    let max = match c.ty.kind() {
+                        ty::Int(_) => {
+                            Some(ty::ScalarInt::truncate_from_int(size.signed_int_max(), size))
+                        }
+                        ty::Uint(_) => {
+                            Some(ty::ScalarInt::truncate_from_uint(size.unsigned_int_max(), size))
+                        }
+                        ty::Char => Some(ty::ScalarInt::truncate_from_uint(char::MAX, size)),
+                        _ => None,
+                    };
+                    if let Some((max, _)) = max
+                        && end == max
+                    {
+                        return write!(f, "..");
+                    }
+                }
+                write!(f, "..={end}")
+            }
         }
     }
@@ -46,5 +58,5 @@ impl<'tcx> fmt::Debug for PatternKind<'tcx> {
 #[derive(Clone, PartialEq, Eq, Hash)]
 #[derive(HashStable, TyEncodable, TyDecodable, TypeVisitable, TypeFoldable)]
 pub enum PatternKind<'tcx> {
-    Range { start: Option<ty::Const<'tcx>>, end: Option<ty::Const<'tcx>>, include_end: bool },
+    Range { start: ty::Const<'tcx>, end: ty::Const<'tcx> },
 }

View file

@@ -51,22 +51,12 @@ impl<'tcx> Relate<TyCtxt<'tcx>> for ty::Pattern<'tcx> {
     ) -> RelateResult<'tcx, Self> {
         match (&*a, &*b) {
             (
-                &ty::PatternKind::Range { start: start_a, end: end_a, include_end: inc_a },
-                &ty::PatternKind::Range { start: start_b, end: end_b, include_end: inc_b },
+                &ty::PatternKind::Range { start: start_a, end: end_a },
+                &ty::PatternKind::Range { start: start_b, end: end_b },
             ) => {
-                // FIXME(pattern_types): make equal patterns equal (`0..=` is the same as `..=`).
-                let mut relate_opt_const = |a, b| match (a, b) {
-                    (None, None) => Ok(None),
-                    (Some(a), Some(b)) => relation.relate(a, b).map(Some),
-                    // FIXME(pattern_types): report a better error
-                    _ => Err(TypeError::Mismatch),
-                };
-                let start = relate_opt_const(start_a, start_b)?;
-                let end = relate_opt_const(end_a, end_b)?;
-                if inc_a != inc_b {
-                    todo!()
-                }
-                Ok(relation.cx().mk_pat(ty::PatternKind::Range { start, end, include_end: inc_a }))
+                let start = relation.relate(start_a, start_b)?;
+                let end = relation.relate(end_a, end_b)?;
+                Ok(relation.cx().mk_pat(ty::PatternKind::Range { start, end }))
             }
         }
     }

View file

@@ -284,6 +284,7 @@ TrivialTypeTraversalImpls! {
     rustc_hir::def_id::LocalDefId,
     rustc_hir::HirId,
     rustc_hir::MatchSource,
+    rustc_hir::RangeEnd,
     rustc_span::Ident,
     rustc_span::Span,
     rustc_span::Symbol,

View file

@@ -137,9 +137,9 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
             ty::Pat(ty, pat) => {
                 match *pat {
-                    ty::PatternKind::Range { start, end, include_end: _ } => {
-                        stack.extend(end.map(Into::into));
-                        stack.extend(start.map(Into::into));
+                    ty::PatternKind::Range { start, end } => {
+                        stack.push(end.into());
+                        stack.push(start.into());
                     }
                 }
                 stack.push(ty.into());

View file

@@ -2954,14 +2954,27 @@ impl<'a> Parser<'a> {
             }
             _ => unreachable!(),
         };
+        // is lifetime `n` tokens ahead?
+        let is_lifetime = |this: &Self, n| this.look_ahead(n, |t| t.is_lifetime());
         // Is `self` `n` tokens ahead?
         let is_isolated_self = |this: &Self, n| {
             this.is_keyword_ahead(n, &[kw::SelfLower])
                 && this.look_ahead(n + 1, |t| t != &token::PathSep)
         };
+        // Is `pin const self` `n` tokens ahead?
+        let is_isolated_pin_const_self = |this: &Self, n| {
+            this.look_ahead(n, |token| token.is_ident_named(sym::pin))
+                && this.is_keyword_ahead(n + 1, &[kw::Const])
+                && is_isolated_self(this, n + 2)
+        };
         // Is `mut self` `n` tokens ahead?
         let is_isolated_mut_self =
             |this: &Self, n| this.is_keyword_ahead(n, &[kw::Mut]) && is_isolated_self(this, n + 1);
+        // Is `pin mut self` `n` tokens ahead?
+        let is_isolated_pin_mut_self = |this: &Self, n| {
+            this.look_ahead(n, |token| token.is_ident_named(sym::pin))
+                && is_isolated_mut_self(this, n + 1)
+        };
         // Parse `self` or `self: TYPE`. We already know the current token is `self`.
         let parse_self_possibly_typed = |this: &mut Self, m| {
             let eself_ident = expect_self_ident(this);
@@ -3012,26 +3025,35 @@ impl<'a> Parser<'a> {
         let eself_lo = self.token.span;
         let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind {
             token::And => {
-                let eself = if is_isolated_self(self, 1) {
-                    // `&self`
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Not)
-                } else if is_isolated_mut_self(self, 1) {
-                    // `&mut self`
-                    self.bump();
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Mut)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
-                    // `&'lt self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    SelfKind::Region(Some(lt), Mutability::Not)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
-                    // `&'lt mut self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    self.bump();
-                    SelfKind::Region(Some(lt), Mutability::Mut)
+                let has_lifetime = is_lifetime(self, 1);
+                let skip_lifetime_count = has_lifetime as usize;
+                let eself = if is_isolated_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    SelfKind::Region(lifetime, Mutability::Not)
+                } else if is_isolated_mut_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} mut self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.bump(); // mut
+                    SelfKind::Region(lifetime, Mutability::Mut)
+                } else if is_isolated_pin_const_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} pin const self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span);
+                    self.bump(); // pin
+                    self.bump(); // const
+                    SelfKind::Pinned(lifetime, Mutability::Not)
+                } else if is_isolated_pin_mut_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} pin mut self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span);
+                    self.bump(); // pin
+                    self.bump(); // mut
+                    SelfKind::Pinned(lifetime, Mutability::Mut)
                 } else {
                     // `&not_self`
                     return Ok(None);
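A rough sketch of the receiver forms the new parser branches accept. This is nightly-only, gated behind the unstable `pin_ergonomics` feature, and the assumption that these receivers behave like `self: Pin<&Self>` / `self: Pin<&mut Self>` is mine, not something stated in this diff:

    #![feature(pin_ergonomics)]
    #![allow(incomplete_features)]

    struct Task;

    impl Task {
        fn peek(&pin const self) {}      // `&pin const self`
        fn poll(&pin mut self) {}        // `&pin mut self`
        fn tick<'a>(&'a pin mut self) {} // `&'lt pin mut self`
    }

    fn main() {}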

View file

@@ -624,7 +624,7 @@ pub(crate) struct UnnecessaryQualification<'ra> {
     pub removal_span: Span,
 }
-#[derive(Default)]
+#[derive(Default, Debug)]
 struct DiagMetadata<'ast> {
     /// The current trait's associated items' ident, used for diagnostic suggestions.
     current_trait_assoc_items: Option<&'ast [P<AssocItem>]>,
@@ -3147,6 +3147,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
             PathSource::Trait(AliasPossibility::No),
             Finalize::new(trait_ref.ref_id, trait_ref.path.span),
             RecordPartialRes::Yes,
+            None,
         );
         self.diag_metadata.currently_processing_impl_trait = None;
         if let Some(def_id) = res.expect_full_res().opt_def_id() {
@@ -4073,6 +4074,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
             source,
             Finalize::new(id, path.span),
             RecordPartialRes::Yes,
+            None,
        );
    }
@@ -4084,14 +4086,21 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
        source: PathSource<'ast>,
        finalize: Finalize,
        record_partial_res: RecordPartialRes,
+        parent_qself: Option<&QSelf>,
    ) -> PartialRes {
        let ns = source.namespace();
        let Finalize { node_id, path_span, .. } = finalize;
        let report_errors = |this: &mut Self, res: Option<Res>| {
            if this.should_report_errs() {
-                let (err, candidates) =
-                    this.smart_resolve_report_errors(path, None, path_span, source, res);
+                let (err, candidates) = this.smart_resolve_report_errors(
+                    path,
+                    None,
+                    path_span,
+                    source,
+                    res,
+                    parent_qself,
+                );
                let def_id = this.parent_scope.module.nearest_parent_mod();
                let instead = res.is_some();
@@ -4160,6 +4169,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
                    path_span,
                    PathSource::Type,
                    None,
+                    parent_qself,
                );
                // There are two different error messages user might receive at
@@ -4437,6 +4447,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
            PathSource::Trait(AliasPossibility::No),
            Finalize::new(finalize.node_id, qself.path_span),
            RecordPartialRes::No,
+            Some(&qself),
        );
        if trait_res.expect_full_res() == Res::Err {
@@ -4461,6 +4472,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
            PathSource::TraitItem(ns),
            Finalize::with_root_span(finalize.node_id, finalize.path_span, qself.path_span),
            RecordPartialRes::No,
+            Some(&qself),
        );
        // The remaining segments (the `C` in our example) will

View file

@@ -35,7 +35,7 @@ use super::NoConstantGenericsReason;
 use crate::diagnostics::{ImportSuggestion, LabelSuggestion, TypoSuggestion};
 use crate::late::{
     AliasPossibility, LateResolutionVisitor, LifetimeBinderKind, LifetimeRes, LifetimeRibKind,
-    LifetimeUseSet, RibKind,
+    LifetimeUseSet, QSelf, RibKind,
 };
 use crate::ty::fast_reject::SimplifiedType;
 use crate::{
@@ -421,6 +421,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
         span: Span,
         source: PathSource<'_>,
         res: Option<Res>,
+        qself: Option<&QSelf>,
     ) -> (Diag<'tcx>, Vec<ImportSuggestion>) {
         debug!(?res, ?source);
         let base_error = self.make_base_error(path, span, source, res);
@@ -429,6 +430,14 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
         let mut err = self.r.dcx().struct_span_err(base_error.span, base_error.msg.clone());
         err.code(code);
+        // Try to get the span of the identifier within the path's syntax context
+        // (if that's different).
+        if let Some(within_macro_span) =
+            base_error.span.within_macro(span, self.r.tcx.sess.source_map())
+        {
+            err.span_label(within_macro_span, "due to this macro variable");
+        }
         self.detect_missing_binding_available_from_pattern(&mut err, path, following_seg);
         self.suggest_at_operator_in_slice_pat_with_range(&mut err, path);
         self.suggest_swapping_misplaced_self_ty_and_trait(&mut err, source, res, base_error.span);
@@ -453,6 +462,15 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
         self.suggest_self_or_self_ref(&mut err, path, span);
         self.detect_assoc_type_constraint_meant_as_path(&mut err, &base_error);
+        self.detect_rtn_with_fully_qualified_path(
+            &mut err,
+            path,
+            following_seg,
+            span,
+            source,
+            res,
+            qself,
+        );
         if self.suggest_self_ty(&mut err, source, path, span)
             || self.suggest_self_value(&mut err, source, path, span)
         {
@@ -501,6 +519,33 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
         (err, candidates)
     }
+    fn detect_rtn_with_fully_qualified_path(
+        &self,
+        err: &mut Diag<'_>,
+        path: &[Segment],
+        following_seg: Option<&Segment>,
+        span: Span,
+        source: PathSource<'_>,
+        res: Option<Res>,
+        qself: Option<&QSelf>,
+    ) {
+        if let Some(Res::Def(DefKind::AssocFn, _)) = res
+            && let PathSource::TraitItem(TypeNS) = source
+            && let None = following_seg
+            && let Some(qself) = qself
+            && let TyKind::Path(None, ty_path) = &qself.ty.kind
+            && ty_path.segments.len() == 1
+            && self.diag_metadata.current_where_predicate.is_some()
+        {
+            err.span_suggestion_verbose(
+                span,
+                "you might have meant to use the return type notation syntax",
+                format!("{}::{}(..)", ty_path.segments[0].ident, path[path.len() - 1].ident),
+                Applicability::MaybeIncorrect,
+            );
+        }
+    }
     fn detect_assoc_type_constraint_meant_as_path(
         &self,
         err: &mut Diag<'_>,
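A hypothetical reproduction of the case `detect_rtn_with_fully_qualified_path` targets: a fully qualified associated *function* path used where a type is expected in a `where` clause. The trait and names below are invented; the suggested fix would be return type notation, e.g. `D::connect(..): Send` (nightly `return_type_notation` feature):

    trait Database {
        fn connect() -> impl Sized;
    }

    // error: expected associated type, found associated function `connect`;
    // the new suggestion proposes `D::connect(..): Send` instead.
    fn run<D: Database>()
    where
        <D as Database>::connect: Send,
    {
    }

    fn main() {}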

View file

@@ -88,10 +88,9 @@ impl RustcInternal for Pattern {
     type T<'tcx> = rustc_ty::Pattern<'tcx>;
     fn internal<'tcx>(&self, tables: &mut Tables<'_>, tcx: TyCtxt<'tcx>) -> Self::T<'tcx> {
         tcx.mk_pat(match self {
-            Pattern::Range { start, end, include_end } => rustc_ty::PatternKind::Range {
-                start: start.as_ref().map(|c| c.internal(tables, tcx)),
-                end: end.as_ref().map(|c| c.internal(tables, tcx)),
-                include_end: *include_end,
+            Pattern::Range { start, end, include_end: _ } => rustc_ty::PatternKind::Range {
+                start: start.as_ref().unwrap().internal(tables, tcx),
+                end: end.as_ref().unwrap().internal(tables, tcx),
             },
         })
     }

View file

@@ -405,10 +405,11 @@ impl<'tcx> Stable<'tcx> for ty::Pattern<'tcx> {
     fn stable(&self, tables: &mut Tables<'_>) -> Self::T {
         match **self {
-            ty::PatternKind::Range { start, end, include_end } => stable_mir::ty::Pattern::Range {
-                start: start.stable(tables),
-                end: end.stable(tables),
-                include_end,
+            ty::PatternKind::Range { start, end } => stable_mir::ty::Pattern::Range {
+                // FIXME(SMIR): update data structures to not have an Option here anymore
+                start: Some(start.stable(tables)),
+                end: Some(end.stable(tables)),
+                include_end: true,
             },
         }
     }

View file

@@ -1173,6 +1173,8 @@ pub enum DesugaringKind {
     BoundModifier,
     /// Calls to contract checks (`#[requires]` to precond, `#[ensures]` to postcond)
     Contract,
+    /// A pattern type range start/end
+    PatTyRange,
 }
 impl DesugaringKind {
@@ -1190,6 +1192,7 @@ impl DesugaringKind {
             DesugaringKind::WhileLoop => "`while` loop",
             DesugaringKind::BoundModifier => "trait bound modifier",
             DesugaringKind::Contract => "contract check",
+            DesugaringKind::PatTyRange => "pattern type",
         }
     }
 }

View file

@@ -1057,6 +1057,37 @@ impl Span {
         }
     }
+    /// Returns the `Span` within the syntax context of "within". This is useful when
+    /// "self" is an expansion from a macro variable, since this can be used for
+    /// providing extra macro expansion context for certain errors.
+    ///
+    /// ```text
+    /// macro_rules! m {
+    ///     ($ident:ident) => { ($ident,) }
+    /// }
+    ///
+    /// m!(outer_ident);
+    /// ```
+    ///
+    /// If "self" is the span of the outer_ident, and "within" is the span of the `($ident,)`
+    /// expr, then this will return the span of the `$ident` macro variable.
+    pub fn within_macro(self, within: Span, sm: &SourceMap) -> Option<Span> {
+        match Span::prepare_to_combine(self, within) {
+            // Only return something if it doesn't overlap with the original span,
+            // and the span isn't "imported" (i.e. from unavailable sources).
+            // FIXME: This does limit the usefulness of the error when the macro is
+            // from a foreign crate; we could also take into account `-Zmacro-backtrace`,
+            // which doesn't redact this span (but that would mean passing in even more
+            // args to this function, lol).
+            Ok((self_, _, parent))
+                if self_.hi < self.lo() || self.hi() < self_.lo && !sm.is_imported(within) =>
+            {
+                Some(Span::new(self_.lo, self_.hi, self_.ctxt, parent))
+            }
+            _ => None,
+        }
+    }
     pub fn from_inner(self, inner: InnerSpan) -> Span {
         let span = self.data();
         Span::new(
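A hypothetical user-side scenario for the new "due to this macro variable" labels built on `within_macro`: the failing method call expands from a macro variable, so the extra span points back at `$x` inside the macro definition (macro and method names are made up):

    macro_rules! call_frobnicate {
        ($x:expr) => {
            $x.frobnicate() // error: no method `frobnicate` on `u32`
        };
    }

    fn main() {
        call_frobnicate!(42_u32);
    }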

View file

@@ -308,6 +308,9 @@ symbols! {
         RangeFull,
         RangeInclusive,
         RangeInclusiveCopy,
+        RangeMax,
+        RangeMin,
+        RangeSub,
         RangeTo,
         RangeToInclusive,
         Rc,
@@ -1522,6 +1525,7 @@ symbols! {
         pattern_complexity_limit,
         pattern_parentheses,
         pattern_type,
+        pattern_type_range_trait,
         pattern_types,
         permissions_from_mode,
         phantom_data,

View file

@@ -413,12 +413,8 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> {
             }
             ty::Pat(ty, pat) => match *pat {
-                ty::PatternKind::Range { start, end, include_end } => {
-                    let consts = [
-                        start.unwrap_or(self.tcx.consts.unit),
-                        end.unwrap_or(self.tcx.consts.unit),
-                        ty::Const::from_bool(self.tcx, include_end),
-                    ];
+                ty::PatternKind::Range { start, end } => {
+                    let consts = [start, end];
                     // HACK: Represent as tuple until we have something better.
                     // HACK: constants are used in arrays, even if the types don't match.
                     self.push("T");

View file

@@ -1,8 +1,6 @@
 use std::ops::ControlFlow;
-use rustc_errors::{
-    Applicability, Diag, E0283, E0284, E0790, MultiSpan, StashKey, struct_span_code_err,
-};
+use rustc_errors::{Applicability, Diag, E0283, E0284, E0790, MultiSpan, struct_span_code_err};
 use rustc_hir as hir;
 use rustc_hir::LangItem;
 use rustc_hir::def::{DefKind, Res};
@@ -197,7 +195,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                 // be ignoring the fact that we don't KNOW the type works
                 // out. Though even that would probably be harmless, given that
                 // we're only talking about builtin traits, which are known to be
-                // inhabited. We used to check for `self.tcx.sess.has_errors()` to
+                // inhabited. We used to check for `self.tainted_by_errors()` to
                 // avoid inundating the user with unnecessary errors, but we now
                 // check upstream for type errors and don't add the obligations to
                 // begin with in those cases.
@@ -211,7 +209,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                     TypeAnnotationNeeded::E0282,
                     false,
                 );
-                return err.stash(span, StashKey::MaybeForgetReturn).unwrap();
+                return err.emit();
             }
             Some(e) => return e,
         }

View file

@@ -829,7 +829,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             && let ty::Closure(closure_def_id, _) | ty::CoroutineClosure(closure_def_id, _) =
                 *typeck_results.node_type(arg_hir_id).kind()
         {
-            // Otherwise, extract the closure kind from the obligation.
+            // Otherwise, extract the closure kind from the obligation,
+            // but only if we actually have an argument to deduce the
+            // closure type from...
             let mut err = self.report_closure_error(
                 &obligation,
                 closure_def_id,
@@ -844,7 +846,17 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         let self_ty = trait_pred.self_ty().skip_binder();
+        let (expected_kind, trait_prefix) =
             if let Some(expected_kind) = self.tcx.fn_trait_kind_from_def_id(trait_pred.def_id()) {
+                (expected_kind, "")
+            } else if let Some(expected_kind) =
+                self.tcx.async_fn_trait_kind_from_def_id(trait_pred.def_id())
+            {
+                (expected_kind, "Async")
+            } else {
+                return None;
+            };
         let (closure_def_id, found_args, by_ref_captures) = match *self_ty.kind() {
             ty::Closure(def_id, args) => {
                 (def_id, args.as_closure().sig().map_bound(|sig| sig.inputs()[0]), None)
@@ -859,8 +871,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             _ => return None,
         };
-        let expected_args =
-            trait_pred.map_bound(|trait_pred| trait_pred.trait_ref.args.type_at(1));
+        let expected_args = trait_pred.map_bound(|trait_pred| trait_pred.trait_ref.args.type_at(1));
         // Verify that the arguments are compatible. If the signature is
         // mismatched, then we have a totally different error to report.
@@ -880,7 +891,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                 closure_def_id,
                 found_kind,
                 expected_kind,
-                "",
+                trait_prefix,
             );
             self.note_obligation_cause(&mut err, &obligation);
             return Some(err.emit());
@@ -900,7 +911,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             self.note_obligation_cause(&mut err, &obligation);
             return Some(err.emit());
         }
-        }
         None
     }
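A hypothetical repro of the kind mismatch the reworked diagnostic now covers for the async flavor: the async closure mutates captured state, so it only implements `AsyncFnMut`, while the bound asks for `AsyncFn` (the function name is invented):

    fn spawn_handler(_f: impl AsyncFn()) {}

    fn main() {
        let mut hits = 0;
        spawn_handler(async || {
            hits += 1; // forces `AsyncFnMut`, not `AsyncFn`
        });
    }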

View file

@@ -2695,7 +2695,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             | ObligationCauseCode::LetElse
             | ObligationCauseCode::BinOp { .. }
             | ObligationCauseCode::AscribeUserTypeProvePredicate(..)
-            | ObligationCauseCode::DropImpl
+            | ObligationCauseCode::AlwaysApplicableImpl
             | ObligationCauseCode::ConstParam(_)
             | ObligationCauseCode::ReferenceOutlivesReferent(..)
             | ObligationCauseCode::ObjectTypeBound(..) => {}
@@ -3191,7 +3191,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                 false
             };
-            if !is_upvar_tys_infer_tuple {
+            let is_builtin_async_fn_trait =
+                tcx.async_fn_trait_kind_from_def_id(data.parent_trait_pred.def_id()).is_some();
+            if !is_upvar_tys_infer_tuple && !is_builtin_async_fn_trait {
                 let ty_str = tcx.short_string(ty, err.long_ty_path());
                 let msg = format!("required because it appears within the type `{ty_str}`");
                 match ty.kind() {

View file

@@ -983,8 +983,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                     return Err(SelectionError::Unimplemented);
                 }
             } else {
-                nested.push(obligation.with(
+                nested.push(Obligation::new(
                     self.tcx(),
+                    obligation.derived_cause(ObligationCauseCode::BuiltinDerived),
+                    obligation.param_env,
                     ty::TraitRef::new(
                         self.tcx(),
                         self.tcx().require_lang_item(

View file

@@ -708,7 +708,7 @@ impl<'a, 'tcx> TypeVisitor<TyCtxt<'tcx>> for WfPredicates<'a, 'tcx> {
             ty::Pat(subty, pat) => {
                 self.require_sized(subty, ObligationCauseCode::Misc);
                 match *pat {
-                    ty::PatternKind::Range { start, end, include_end: _ } => {
+                    ty::PatternKind::Range { start, end } => {
                         let mut check = |c| {
                             let cause = self.cause(ObligationCauseCode::Misc);
                             self.out.push(traits::Obligation::with_depth(
@@ -738,12 +738,8 @@ impl<'a, 'tcx> TypeVisitor<TyCtxt<'tcx>> for WfPredicates<'a, 'tcx> {
                                 }
                             }
                         };
-                        if let Some(start) = start {
-                            check(start)
-                        }
-                        if let Some(end) = end {
-                            check(end)
-                        }
+                        check(start);
+                        check(end);
                     }
                 }
             }

View file

@@ -205,24 +205,17 @@ fn layout_of_uncached<'tcx>(
             let layout = cx.layout_of(ty)?.layout;
             let mut layout = LayoutData::clone(&layout.0);
             match *pat {
-                ty::PatternKind::Range { start, end, include_end } => {
+                ty::PatternKind::Range { start, end } => {
                     if let BackendRepr::Scalar(scalar) | BackendRepr::ScalarPair(scalar, _) =
                         &mut layout.backend_repr
                     {
-                        if let Some(start) = start {
-                            scalar.valid_range_mut().start = extract_const_value(cx, ty, start)?
-                                .try_to_bits(tcx, cx.typing_env)
-                                .ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?;
-                        }
-                        if let Some(end) = end {
-                            let mut end = extract_const_value(cx, ty, end)?
-                                .try_to_bits(tcx, cx.typing_env)
-                                .ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?;
-                            if !include_end {
-                                end = end.wrapping_sub(1);
-                            }
-                            scalar.valid_range_mut().end = end;
-                        }
+                        scalar.valid_range_mut().start = extract_const_value(cx, ty, start)?
+                            .try_to_bits(tcx, cx.typing_env)
+                            .ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?;
+                        scalar.valid_range_mut().end = extract_const_value(cx, ty, end)?
+                            .try_to_bits(tcx, cx.typing_env)
+                            .ok_or_else(|| error(cx, LayoutError::Unknown(ty)))?;
                         let niche = Niche {
                             offset: Size::ZERO,

View file

@@ -61,9 +61,9 @@ dependencies = [
 [[package]]
 name = "compiler_builtins"
-version = "0.1.150"
+version = "0.1.151"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c42734e0ccf0d9f953165770593a75306f0b24dda1aa03f115c70748726dbca"
+checksum = "abc30f1766d387c35f2405e586d3e7a88230dc728ff78cd1d0bc59ae0b63154b"
 dependencies = [
  "cc",
  "rustc-std-workspace-core",

View file

@@ -12,7 +12,7 @@ edition = "2021"
 [dependencies]
 core = { path = "../core", public = true }
-compiler_builtins = { version = "=0.1.150", features = ['rustc-dep-of-std'] }
+compiler_builtins = { version = "=0.1.151", features = ['rustc-dep-of-std'] }
 [dev-dependencies]
 rand = { version = "0.9.0", default-features = false, features = ["alloc"] }

View file

@@ -151,7 +151,9 @@ impl fmt::Display for ByteStr {
         };
         let nchars: usize = self
             .utf8_chunks()
-            .map(|chunk| chunk.valid().len() + if chunk.invalid().is_empty() { 0 } else { 1 })
+            .map(|chunk| {
+                chunk.valid().chars().count() + if chunk.invalid().is_empty() { 0 } else { 1 }
+            })
             .sum();
         let padding = f.width().unwrap_or(0).saturating_sub(nchars);
         let fill = f.fill();
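Why the padding math counts `char`s instead of bytes: a multi-byte scalar value is one displayed unit but several bytes. A quick stable-Rust check of the difference:

    fn main() {
        let s = "héllo";
        assert_eq!(s.len(), 6);           // bytes: "é" is 2 bytes in UTF-8
        assert_eq!(s.chars().count(), 5); // chars: what width padding should use
        println!("[{:>8}]", s);           // pads based on displayed characters
    }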

View file

@@ -778,7 +778,6 @@ impl<T> From<T> for T {
 ///
 /// [#64715]: https://github.com/rust-lang/rust/issues/64715
 #[stable(feature = "convert_infallible", since = "1.34.0")]
-#[allow(unused_attributes)] // FIXME(#58633): do a principled fix instead.
 #[rustc_reservation_impl = "permitting this impl would forbid us from adding \
                             `impl<T> From<!> for T` later; see rust-lang/rust#64715 for details"]
 impl<T> From<!> for T {

View file

@@ -1,11 +1,16 @@
 use crate::fmt;
 use crate::iter::FusedIterator;
-/// Creates a new iterator where each successive item is computed based on the preceding one.
+/// Creates an iterator which, starting from an initial item,
+/// computes each successive item from the preceding one.
 ///
-/// The iterator starts with the given first item (if any)
-/// and calls the given `FnMut(&T) -> Option<T>` closure to compute each item's successor.
-/// The iterator will yield the `T`s returned from the closure.
+/// This iterator stores an optional item (`Option<T>`) and a successor closure (`impl FnMut(&T) -> Option<T>`).
+/// Its `next` method returns the stored optional item and
+/// if it is `Some(val)` calls the stored closure on `&val` to compute and store its successor.
+/// The iterator will apply the closure successively to the stored option's value until the option is `None`.
+/// This also means that once the stored option is `None` it will remain `None`,
+/// as the closure will not be called again, so the created iterator is a [`FusedIterator`].
+/// The iterator's items will be the initial item and all of its successors as calculated by the successor closure.
 ///
 /// ```
 /// use std::iter::successors;
@@ -24,7 +29,8 @@ where
     Successors { next: first, succ }
 }
-/// A new iterator where each successive item is computed based on the preceding one.
+/// An iterator which, starting from an initial item,
+/// computes each successive item from the preceding one.
 ///
 /// This `struct` is created by the [`iter::successors()`] function.
 /// See its documentation for more.
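A short usage example of `iter::successors` matching the reworded documentation above; once the closure returns `None`, the stored item stays `None`, so the iterator is fused:

    use std::iter::successors;

    fn main() {
        // Powers of two until `checked_mul` would overflow.
        let powers: Vec<u32> = successors(Some(1u32), |&x| x.checked_mul(2))
            .take(5)
            .collect();
        assert_eq!(powers, [1, 2, 4, 8, 16]);
    }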

View file

@@ -453,8 +453,8 @@ impl Copy for ! {}
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> Copy for &T {}
-/// Marker trait for the types that are allowed in union fields, unsafe fields,
-/// and unsafe binder types.
+/// Marker trait for the types that are allowed in union fields and unsafe
+/// binder types.
 ///
 /// Implemented for:
 /// * `&T`, `&mut T` for all `T`,

View file

@@ -12,3 +12,65 @@ macro_rules! pattern_type {
         /* compiler built-in */
     };
 }
+/// A trait implemented for integer types and `char`.
+/// Useful in the future for generic pattern types, but
+/// used right now to simplify ast lowering of pattern type ranges.
+#[unstable(feature = "pattern_type_range_trait", issue = "123646")]
+#[rustc_const_unstable(feature = "pattern_type_range_trait", issue = "123646")]
+#[const_trait]
+#[diagnostic::on_unimplemented(
+    message = "`{Self}` is not a valid base type for range patterns",
+    label = "only integer types and `char` are supported"
+)]
+pub trait RangePattern {
+    /// Trait version of the inherent `MIN` assoc const.
+    #[cfg_attr(not(bootstrap), lang = "RangeMin")]
+    const MIN: Self;
+    /// Trait version of the inherent `MIN` assoc const.
+    #[cfg_attr(not(bootstrap), lang = "RangeMax")]
+    const MAX: Self;
+    /// A compile-time helper to subtract 1 for exclusive ranges.
+    #[cfg_attr(not(bootstrap), lang = "RangeSub")]
+    #[track_caller]
+    fn sub_one(self) -> Self;
+}
+macro_rules! impl_range_pat {
+    ($($ty:ty,)*) => {
+        $(
+            #[rustc_const_unstable(feature = "pattern_type_range_trait", issue = "123646")]
+            impl const RangePattern for $ty {
+                const MIN: $ty = <$ty>::MIN;
+                const MAX: $ty = <$ty>::MAX;
+                fn sub_one(self) -> Self {
+                    match self.checked_sub(1) {
+                        Some(val) => val,
+                        None => panic!("exclusive range end at minimum value of type")
+                    }
+                }
+            }
+        )*
+    }
+}
+impl_range_pat! {
+    i8, i16, i32, i64, i128, isize,
+    u8, u16, u32, u64, u128, usize,
+}
+#[rustc_const_unstable(feature = "pattern_type_range_trait", issue = "123646")]
+impl const RangePattern for char {
+    const MIN: Self = char::MIN;
+    const MAX: Self = char::MAX;
+    fn sub_one(self) -> Self {
+        match char::from_u32(self as u32 - 1) {
+            None => panic!("exclusive range to start of valid chars"),
+            Some(val) => val,
+        }
+    }
+}
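A conceptual sketch (not compiler code) of the job `RangePattern` does for the lowering described above: fill in missing range endpoints with `MIN`/`MAX` and turn an exclusive end into an inclusive one via `sub_one`. Everything apart from those three names is invented for illustration:

    fn normalize_range<T: Copy>(
        start: Option<T>,
        end: Option<T>,
        inclusive: bool,
        min: T,                   // stands in for `RangePattern::MIN`
        max: T,                   // stands in for `RangePattern::MAX`
        sub_one: impl Fn(T) -> T, // stands in for `RangePattern::sub_one`
    ) -> (T, T) {
        let start = start.unwrap_or(min);   // `..=b`  becomes `MIN..=b`
        let end = match (end, inclusive) {
            (None, _) => max,               // `a..`   becomes `a..=MAX`
            (Some(e), true) => e,           // `a..=b` stays as-is
            (Some(e), false) => sub_one(e), // `a..b`  becomes `a..=b-1`
        };
        (start, end)
    }

    fn main() {
        let (lo, hi) = normalize_range(Some(1u8), Some(10), false, u8::MIN, u8::MAX, |x| x - 1);
        assert_eq!((lo, hi), (1, 9));
    }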

View file

@@ -18,7 +18,7 @@ cfg-if = { version = "1.0", features = ['rustc-dep-of-std'] }
 panic_unwind = { path = "../panic_unwind", optional = true }
 panic_abort = { path = "../panic_abort" }
 core = { path = "../core", public = true }
-compiler_builtins = { version = "=0.1.150" }
+compiler_builtins = { version = "=0.1.151" }
 unwind = { path = "../unwind" }
 hashbrown = { version = "0.15", default-features = false, features = [
     'rustc-dep-of-std',


@ -641,11 +641,6 @@ impl Error for JoinPathsError {
/// None => println!("Impossible to get your home dir!"), /// None => println!("Impossible to get your home dir!"),
/// } /// }
/// ``` /// ```
#[deprecated(
since = "1.29.0",
note = "This function's behavior may be unexpected on Windows. \
Consider using a crate from crates.io instead."
)]
#[must_use] #[must_use]
#[stable(feature = "env", since = "1.0.0")] #[stable(feature = "env", since = "1.0.0")]
pub fn home_dir() -> Option<PathBuf> { pub fn home_dir() -> Option<PathBuf> {
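A short usage sketch of the now-undeprecated function; the `#[allow(deprecated)]` attribute is harmless here and only needed on toolchains that still carry the deprecation:

```rust
use std::env;

#[allow(deprecated)] // required only on toolchains predating this change
fn main() {
    match env::home_dir() {
        Some(path) => println!("Your home directory, probably: {}", path.display()),
        None => println!("Impossible to get your home dir!"),
    }
}
```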


@ -54,7 +54,7 @@ runners:
<<: *base-job <<: *base-job
- &job-aarch64-linux-8c - &job-aarch64-linux-8c
os: ubuntu-22.04-arm64-8core-32gb os: ubuntu-24.04-arm64-8core-32gb
<<: *base-job <<: *base-job
envs: envs:
env-x86_64-apple-tests: &env-x86_64-apple-tests env-x86_64-apple-tests: &env-x86_64-apple-tests


@ -69,6 +69,7 @@
- [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md) - [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md)
- [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md) - [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md)
- [powerpc-unknown-openbsd](platform-support/powerpc-unknown-openbsd.md) - [powerpc-unknown-openbsd](platform-support/powerpc-unknown-openbsd.md)
- [powerpc-unknown-linux-gnuspe](platform-support/powerpc-unknown-linux-gnuspe.md)
- [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md) - [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md)
- [powerpc64-ibm-aix](platform-support/aix.md) - [powerpc64-ibm-aix](platform-support/aix.md)
- [powerpc64le-unknown-linux-musl](platform-support/powerpc64le-unknown-linux-musl.md) - [powerpc64le-unknown-linux-musl](platform-support/powerpc64le-unknown-linux-musl.md)


@ -348,9 +348,9 @@ target | std | host | notes
[`mipsisa64r6el-unknown-linux-gnuabi64`](platform-support/mips-release-6.md) | ✓ | ✓ | 64-bit MIPS Release 6 Little Endian [`mipsisa64r6el-unknown-linux-gnuabi64`](platform-support/mips-release-6.md) | ✓ | ✓ | 64-bit MIPS Release 6 Little Endian
`msp430-none-elf` | * | | 16-bit MSP430 microcontrollers `msp430-none-elf` | * | | 16-bit MSP430 microcontrollers
[`powerpc-unknown-freebsd`](platform-support/freebsd.md) | ? | | PowerPC FreeBSD [`powerpc-unknown-freebsd`](platform-support/freebsd.md) | ? | | PowerPC FreeBSD
`powerpc-unknown-linux-gnuspe` | ✓ | | PowerPC SPE Linux [`powerpc-unknown-linux-gnuspe`](platform-support/powerpc-unknown-linux-gnuspe.md) | ✓ | | PowerPC SPE Linux
`powerpc-unknown-linux-musl` | ? | | PowerPC Linux with musl 1.2.3 `powerpc-unknown-linux-musl` | ? | | PowerPC Linux with musl 1.2.3
[`powerpc-unknown-linux-muslspe`](platform-support/powerpc-unknown-linux-muslspe.md) | ? | | PowerPC SPE Linux [`powerpc-unknown-linux-muslspe`](platform-support/powerpc-unknown-linux-muslspe.md) | ? | | PowerPC SPE Linux with musl 1.2.3
[`powerpc-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD 32-bit powerpc systems [`powerpc-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD 32-bit powerpc systems
[`powerpc-unknown-openbsd`](platform-support/powerpc-unknown-openbsd.md) | * | | [`powerpc-unknown-openbsd`](platform-support/powerpc-unknown-openbsd.md) | * | |
[`powerpc-wrs-vxworks`](platform-support/vxworks.md) | ✓ | | [`powerpc-wrs-vxworks`](platform-support/vxworks.md) | ✓ | |


@ -0,0 +1,20 @@
# powerpc-unknown-linux-gnuspe
**Tier: 3**
`powerpc-unknown-linux-gnuspe` is a target for Linux on 32-bit PowerPC
processors that implement the Signal Processing Engine (SPE), such as e500, and
uses a different ABI than standard `powerpc-unknown-linux-gnu`.
When building for other 32-bit PowerPC processors, use
`powerpc-unknown-linux-gnu` instead.
See also the [Debian Wiki](https://wiki.debian.org/PowerPCSPEPort) for details on
this platform, and the [ABI reference](https://web.archive.org/web/20120608163804/https://www.power.org/resources/downloads/Power-Arch-32-bit-ABI-supp-1.0-Unified.pdf)
for details on the SPE ABI.
Note that GCC support for PowerPC SPE was [removed in GCC 9](https://gcc.gnu.org/gcc-8/changes.html),
so recent GCC releases cannot be used as the linker or compiler for this target.
## Target maintainers
There are currently no formally documented target maintainers.


@ -2,9 +2,11 @@
**Tier: 3** **Tier: 3**
This target is very similar to already existing ones like `powerpc_unknown_linux_musl` and `powerpc_unknown_linux_gnuspe`. This target is very similar to already existing ones like `powerpc-unknown-linux-musl` and `powerpc-unknown-linux-gnuspe`.
This one has PowerPC SPE support for musl. Unfortunately, the last supported gcc version with PowerPC SPE is 8.4.0. This one has PowerPC SPE support for musl. Unfortunately, the last supported gcc version with PowerPC SPE is 8.4.0.
See also [platform support documentation of `powerpc-unknown-linux-gnuspe`](powerpc-unknown-linux-gnuspe.md) for information about PowerPC SPE.
## Target maintainers ## Target maintainers
- [@BKPepe](https://github.com/BKPepe) - [@BKPepe](https://github.com/BKPepe)


@ -9,12 +9,32 @@ The tracking issue for this feature is: [#123076]
This feature is incomplete and not yet intended for general use. This feature is incomplete and not yet intended for general use.
This implements experimental, Edition-dependent match ergonomics under consideration for inclusion This implements experimental, Edition-dependent match ergonomics under consideration for inclusion
in Rust. in Rust, allowing `&` patterns in more places. For example:
For more information, see the corresponding typing rules for [Editions 2021 and earlier] and for ```rust,edition2024
[Editions 2024 and later]. #![feature(ref_pat_eat_one_layer_2024_structural)]
#![allow(incomplete_features)]
#
# // Tests type equality in a way that avoids coercing `&&T` or `&mut T` to `&T`.
# trait Eq<T> {}
# impl<T> Eq<T> for T {}
# fn has_type<T>(_: impl Eq<T>) {}
// `&` can match against a `ref` binding mode instead of a reference type:
let (x, &y) = &(0, 1);
has_type::<&u8>(x);
has_type::<u8>(y);
// `&` can match against `&mut` references:
let &z = &mut 2;
has_type::<u8>(z);
```
For specifics, see the corresponding typing rules for [Editions 2021 and earlier] and for
[Editions 2024 and later]. For more information on binding modes, see [The Rust Reference].
For alternative experimental match ergonomics, see the feature For alternative experimental match ergonomics, see the feature
[`ref_pat_eat_one_layer_2024`](./ref-pat-eat-one-layer-2024.md). [`ref_pat_eat_one_layer_2024`](./ref-pat-eat-one-layer-2024.md).
[Editions 2021 and earlier]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAQIBAQEBAAAAAAAAAAAAAAAAAAA%3D&mode=rules&do_cmp=false [Editions 2021 and earlier]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAQIBAQEBAAAAAAAAAAAAAAAAAAA%3D&mode=rules&do_cmp=false
[Editions 2024 and later]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAgEBAQEBAgIAAAAAAAAAAAAAAAA%3D&mode=rules&do_cmp=false [Editions 2024 and later]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAgEBAQEBAgIAAAAAAAAAAAAAAAA%3D&mode=rules&do_cmp=false
[The Rust Reference]: https://doc.rust-lang.org/reference/patterns.html#binding-modes


@ -9,12 +9,33 @@ The tracking issue for this feature is: [#123076]
This feature is incomplete and not yet intended for general use. This feature is incomplete and not yet intended for general use.
This implements experimental, Edition-dependent match ergonomics under consideration for inclusion This implements experimental, Edition-dependent match ergonomics under consideration for inclusion
in Rust. in Rust, allowing `&` patterns in more places. For example:
For more information, see the corresponding typing rules for [Editions 2021 and earlier] and for
[Editions 2024 and later]. ```rust,edition2024
#![feature(ref_pat_eat_one_layer_2024)]
#![allow(incomplete_features)]
#
# // Tests type equality in a way that avoids coercing `&&T` or `&mut T` to `&T`.
# trait Eq<T> {}
# impl<T> Eq<T> for T {}
# fn has_type<T>(_: impl Eq<T>) {}
// `&` can match against a `ref` binding mode instead of a reference type:
let (x, &y) = &(0, 1);
has_type::<&u8>(x);
has_type::<u8>(y);
// `&` can match against `&mut` references:
let &z = &mut 2;
has_type::<u8>(z);
```
For specifics, see the corresponding typing rules for [Editions 2021 and earlier] and for
[Editions 2024 and later]. For more information on binding modes, see [The Rust Reference].
For alternative experimental match ergonomics, see the feature For alternative experimental match ergonomics, see the feature
[`ref_pat_eat_one_layer_2024_structural`](./ref-pat-eat-one-layer-2024-structural.md). [`ref_pat_eat_one_layer_2024_structural`](./ref-pat-eat-one-layer-2024-structural.md).
[Editions 2021 and earlier]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAQIBAQABAAAAAQEBAAEBAAABAAA%3D&mode=rules&do_cmp=false [Editions 2021 and earlier]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAQIBAQABAAAAAQEBAAEBAAABAAA%3D&mode=rules&do_cmp=false
[Editions 2024 and later]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAAABAQABAgIAAQEBAAEBAAABAAA%3D&mode=rules&do_cmp=false [Editions 2024 and later]: https://nadrieril.github.io/typing-rust-patterns/?compare=false&opts1=AQEBAAABAQABAgIAAQEBAAEBAAABAAA%3D&mode=rules&do_cmp=false
[The Rust Reference]: https://doc.rust-lang.org/reference/patterns.html#binding-modes


@ -7,9 +7,8 @@ use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::sym; use rustc_span::symbol::sym;
use tracing::debug; use tracing::debug;
use crate::clean;
use crate::clean::utils::inherits_doc_hidden; use crate::clean::utils::inherits_doc_hidden;
use crate::clean::{Item, ItemIdSet}; use crate::clean::{self, Item, ItemIdSet, reexport_chain};
use crate::core::DocContext; use crate::core::DocContext;
use crate::fold::{DocFolder, strip_item}; use crate::fold::{DocFolder, strip_item};
use crate::passes::{ImplStripper, Pass}; use crate::passes::{ImplStripper, Pass};
@ -89,6 +88,25 @@ impl Stripper<'_, '_> {
impl DocFolder for Stripper<'_, '_> { impl DocFolder for Stripper<'_, '_> {
fn fold_item(&mut self, i: Item) -> Option<Item> { fn fold_item(&mut self, i: Item) -> Option<Item> {
let has_doc_hidden = i.is_doc_hidden(); let has_doc_hidden = i.is_doc_hidden();
if let clean::ImportItem(clean::Import { source, .. }) = &i.kind
&& let Some(source_did) = source.did
&& let Some(import_def_id) = i.def_id().and_then(|def_id| def_id.as_local())
{
let reexports = reexport_chain(self.tcx, import_def_id, source_did);
// Check if any reexport in the chain has a hidden source
let has_hidden_source = reexports
.iter()
.filter_map(|reexport| reexport.id())
.any(|reexport_did| self.tcx.is_doc_hidden(reexport_did))
|| self.tcx.is_doc_hidden(source_did);
if has_hidden_source {
return None;
}
}
let is_impl_or_exported_macro = match i.kind { let is_impl_or_exported_macro = match i.kind {
clean::ImplItem(..) => true, clean::ImplItem(..) => true,
// If the macro has the `#[macro_export]` attribute, it means it's accessible at the // If the macro has the `#[macro_export]` attribute, it means it's accessible at the


@ -1108,14 +1108,9 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
pub fn hash_ty_pat(&mut self, pat: &TyPat<'_>) { pub fn hash_ty_pat(&mut self, pat: &TyPat<'_>) {
std::mem::discriminant(&pat.kind).hash(&mut self.s); std::mem::discriminant(&pat.kind).hash(&mut self.s);
match pat.kind { match pat.kind {
TyPatKind::Range(s, e, i) => { TyPatKind::Range(s, e) => {
if let Some(s) = s {
self.hash_const_arg(s); self.hash_const_arg(s);
}
if let Some(e) = e {
self.hash_const_arg(e); self.hash_const_arg(e);
}
std::mem::discriminant(&i).hash(&mut self.s);
}, },
TyPatKind::Err(_) => {}, TyPatKind::Err(_) => {},
} }


@ -285,9 +285,19 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
fn expose_ptr(&self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> { fn expose_provenance(&self, provenance: Provenance) -> InterpResult<'tcx> {
let this = self.eval_context_ref(); let this = self.eval_context_ref();
let mut global_state = this.machine.alloc_addresses.borrow_mut(); let mut global_state = this.machine.alloc_addresses.borrow_mut();
let (alloc_id, tag) = match provenance {
Provenance::Concrete { alloc_id, tag } => (alloc_id, tag),
Provenance::Wildcard => {
// No need to do anything for wildcard pointers as
// their provenances have already been previously exposed.
return interp_ok(());
}
};
// In strict mode, we don't need this, so we can save some cycles by not tracking it. // In strict mode, we don't need this, so we can save some cycles by not tracking it.
if global_state.provenance_mode == ProvenanceMode::Strict { if global_state.provenance_mode == ProvenanceMode::Strict {
return interp_ok(()); return interp_ok(());
@ -422,6 +432,19 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
let rel_offset = this.truncate_to_target_usize(addr.bytes().wrapping_sub(base_addr)); let rel_offset = this.truncate_to_target_usize(addr.bytes().wrapping_sub(base_addr));
Some((alloc_id, Size::from_bytes(rel_offset))) Some((alloc_id, Size::from_bytes(rel_offset)))
} }
/// Prepare all exposed memory for a native call.
/// This overapproximates the modifications which external code might make to memory:
/// We set all reachable allocations as initialized, mark all reachable provenances as exposed
/// and overwrite them with `Provenance::WILDCARD`.
fn prepare_exposed_for_native_call(&mut self) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
// We need to make a deep copy of this list, but it's fine; it also serves as scratch space
// for the search within `prepare_for_native_call`.
let exposed: Vec<AllocId> =
this.machine.alloc_addresses.get_mut().exposed.iter().copied().collect();
this.prepare_for_native_call(exposed)
}
} }
impl<'tcx> MiriMachine<'tcx> { impl<'tcx> MiriMachine<'tcx> {
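As a user-level illustration of the "exposed provenance" notion referenced in the doc comment above, here is a plain-Rust sketch (not Miri-internal code); the remarks about interpreter behaviour assume Miri's permissive provenance mode:

```rust
fn main() {
    let x = 42i32;
    // A pointer-to-integer cast exposes the pointer's provenance; after this,
    // the allocation behind `x` counts as reachable by code that only knows
    // the integer address (for example, native code called through FFI).
    let addr = &raw const x as usize;
    // An integer-to-pointer cast may pick up any previously exposed provenance.
    let ptr = addr as *const i32;
    assert_eq!(unsafe { *ptr }, 42);
}
```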


@ -1291,18 +1291,12 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
/// Called on `ptr as usize` casts. /// Called on `ptr as usize` casts.
/// (Actually computing the resulting `usize` doesn't need machine help, /// (Actually computing the resulting `usize` doesn't need machine help,
/// that's just `Scalar::try_to_int`.) /// that's just `Scalar::try_to_int`.)
#[inline(always)]
fn expose_provenance( fn expose_provenance(
ecx: &InterpCx<'tcx, Self>, ecx: &InterpCx<'tcx, Self>,
provenance: Self::Provenance, provenance: Self::Provenance,
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
match provenance { ecx.expose_provenance(provenance)
Provenance::Concrete { alloc_id, tag } => ecx.expose_ptr(alloc_id, tag),
Provenance::Wildcard => {
// No need to do anything for wildcard pointers as
// their provenances have already been previously exposed.
interp_ok(())
}
}
} }
/// Convert a pointer with provenance into an allocation-offset pair and extra provenance info. /// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.


@ -160,16 +160,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
} }
let imm = this.read_immediate(arg)?; let imm = this.read_immediate(arg)?;
libffi_args.push(imm_to_carg(&imm, this)?); libffi_args.push(imm_to_carg(&imm, this)?);
// If we are passing a pointer, prepare the memory it points to. // If we are passing a pointer, expose its provenance. Below, all exposed memory
// (previously exposed and newly exposed) will then be properly prepared.
if matches!(arg.layout.ty.kind(), ty::RawPtr(..)) { if matches!(arg.layout.ty.kind(), ty::RawPtr(..)) {
let ptr = imm.to_scalar().to_pointer(this)?; let ptr = imm.to_scalar().to_pointer(this)?;
let Some(prov) = ptr.provenance else { let Some(prov) = ptr.provenance else {
// Pointer without provenance may not access any memory. // Pointer without provenance may not access any memory anyway, skip.
continue;
};
// We use `get_alloc_id` for its best-effort behaviour with Wildcard provenance.
let Some(alloc_id) = prov.get_alloc_id() else {
// Wildcard pointer, whatever it points to must be already exposed.
continue; continue;
}; };
// The first time this happens, print a warning. // The first time this happens, print a warning.
@ -178,12 +174,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
this.emit_diagnostic(NonHaltingDiagnostic::NativeCallSharedMem); this.emit_diagnostic(NonHaltingDiagnostic::NativeCallSharedMem);
} }
this.prepare_for_native_call(alloc_id, prov)?; this.expose_provenance(prov)?;
} }
} }
// FIXME: In the future, we should also call `prepare_for_native_call` on all previously // Prepare all exposed memory.
// exposed allocations, since C may access any of them. this.prepare_exposed_for_native_call()?;
// Convert them to `libffi::high::Arg` type. // Convert them to `libffi::high::Arg` type.
let libffi_args = libffi_args let libffi_args = libffi_args


@ -6,7 +6,7 @@
#![feature(box_as_ptr)] #![feature(box_as_ptr)]
use std::mem::MaybeUninit; use std::mem::MaybeUninit;
use std::ptr::null; use std::ptr;
fn main() { fn main() {
test_increment_int(); test_increment_int();
@ -20,6 +20,8 @@ fn main() {
test_pass_dangling(); test_pass_dangling();
test_swap_ptr_triple_dangling(); test_swap_ptr_triple_dangling();
test_return_ptr(); test_return_ptr();
test_pass_ptr_as_int();
test_pass_ptr_via_previously_shared_mem();
} }
/// Test function that modifies an int. /// Test function that modifies an int.
@ -112,7 +114,7 @@ fn test_swap_ptr() {
} }
let x = 61; let x = 61;
let (mut ptr0, mut ptr1) = (&raw const x, null()); let (mut ptr0, mut ptr1) = (&raw const x, ptr::null());
unsafe { swap_ptr(&mut ptr0, &mut ptr1) }; unsafe { swap_ptr(&mut ptr0, &mut ptr1) };
assert_eq!(unsafe { *ptr1 }, x); assert_eq!(unsafe { *ptr1 }, x);
@ -131,7 +133,7 @@ fn test_swap_ptr_tuple() {
} }
let x = 71; let x = 71;
let mut tuple = Tuple { ptr0: &raw const x, ptr1: null() }; let mut tuple = Tuple { ptr0: &raw const x, ptr1: ptr::null() };
unsafe { swap_ptr_tuple(&mut tuple) } unsafe { swap_ptr_tuple(&mut tuple) }
assert_eq!(unsafe { *tuple.ptr1 }, x); assert_eq!(unsafe { *tuple.ptr1 }, x);
@ -148,7 +150,7 @@ fn test_overwrite_dangling() {
drop(b); drop(b);
unsafe { overwrite_ptr(&mut ptr) }; unsafe { overwrite_ptr(&mut ptr) };
assert_eq!(ptr, null()); assert_eq!(ptr, ptr::null());
} }
/// Test function that passes a dangling pointer. /// Test function that passes a dangling pointer.
@ -200,3 +202,33 @@ fn test_return_ptr() {
let ptr = unsafe { return_ptr(ptr) }; let ptr = unsafe { return_ptr(ptr) };
assert_eq!(unsafe { *ptr }, x); assert_eq!(unsafe { *ptr }, x);
} }
/// Test casting a pointer to an integer and passing that to C.
fn test_pass_ptr_as_int() {
extern "C" {
fn pass_ptr_as_int(ptr: usize, set_to_val: i32);
}
let mut m: MaybeUninit<i32> = MaybeUninit::uninit();
unsafe { pass_ptr_as_int(m.as_mut_ptr() as usize, 42) };
assert_eq!(unsafe { m.assume_init() }, 42);
}
fn test_pass_ptr_via_previously_shared_mem() {
extern "C" {
fn set_shared_mem(ptr: *mut *mut i32);
fn init_ptr_stored_in_shared_mem(val: i32);
}
let mut m: *mut i32 = ptr::null_mut();
let ptr_to_m = &raw mut m;
unsafe { set_shared_mem(&raw mut m) };
let mut m2: MaybeUninit<i32> = MaybeUninit::uninit();
// Store a pointer to m2 somewhere that C code can access it.
unsafe { ptr_to_m.write(m2.as_mut_ptr()) };
// Have C code write there.
unsafe { init_ptr_stored_in_shared_mem(42) };
// Ensure this memory is now considered initialized.
assert_eq!(unsafe { m2.assume_init() }, 42);
}


@ -1,33 +1,34 @@
#include <stdio.h> #include <stdio.h>
#include <stdint.h>
// See comments in build_native_lib() // See comments in build_native_lib()
#define EXPORT __attribute__((visibility("default"))) #define EXPORT __attribute__((visibility("default")))
/* Test: test_access_pointer */ /* Test: test_access_pointer */
EXPORT void print_pointer(const int *ptr) { EXPORT void print_pointer(const int32_t *ptr) {
printf("printing pointer dereference from C: %d\n", *ptr); printf("printing pointer dereference from C: %d\n", *ptr);
} }
/* Test: test_access_simple */ /* Test: test_access_simple */
typedef struct Simple { typedef struct Simple {
int field; int32_t field;
} Simple; } Simple;
EXPORT int access_simple(const Simple *s_ptr) { EXPORT int32_t access_simple(const Simple *s_ptr) {
return s_ptr->field; return s_ptr->field;
} }
/* Test: test_access_nested */ /* Test: test_access_nested */
typedef struct Nested { typedef struct Nested {
int value; int32_t value;
struct Nested *next; struct Nested *next;
} Nested; } Nested;
// Returns the innermost/last value of a Nested pointer chain. // Returns the innermost/last value of a Nested pointer chain.
EXPORT int access_nested(const Nested *n_ptr) { EXPORT int32_t access_nested(const Nested *n_ptr) {
// Edge case: `n_ptr == NULL` (i.e. first Nested is None). // Edge case: `n_ptr == NULL` (i.e. first Nested is None).
if (!n_ptr) { return 0; } if (!n_ptr) { return 0; }
@ -41,10 +42,10 @@ EXPORT int access_nested(const Nested *n_ptr) {
/* Test: test_access_static */ /* Test: test_access_static */
typedef struct Static { typedef struct Static {
int value; int32_t value;
struct Static *recurse; struct Static *recurse;
} Static; } Static;
EXPORT int access_static(const Static *s_ptr) { EXPORT int32_t access_static(const Static *s_ptr) {
return s_ptr->recurse->recurse->value; return s_ptr->recurse->recurse->value;
} }


@ -1,23 +1,24 @@
#include <stddef.h> #include <stddef.h>
#include <stdint.h>
// See comments in build_native_lib() // See comments in build_native_lib()
#define EXPORT __attribute__((visibility("default"))) #define EXPORT __attribute__((visibility("default")))
/* Test: test_increment_int */ /* Test: test_increment_int */
EXPORT void increment_int(int *ptr) { EXPORT void increment_int(int32_t *ptr) {
*ptr += 1; *ptr += 1;
} }
/* Test: test_init_int */ /* Test: test_init_int */
EXPORT void init_int(int *ptr, int val) { EXPORT void init_int(int32_t *ptr, int32_t val) {
*ptr = val; *ptr = val;
} }
/* Test: test_init_array */ /* Test: test_init_array */
EXPORT void init_array(int *array, size_t len, int val) { EXPORT void init_array(int32_t *array, size_t len, int32_t val) {
for (size_t i = 0; i < len; i++) { for (size_t i = 0; i < len; i++) {
array[i] = val; array[i] = val;
} }
@ -26,28 +27,28 @@ EXPORT void init_array(int *array, size_t len, int val) {
/* Test: test_init_static_inner */ /* Test: test_init_static_inner */
typedef struct SyncPtr { typedef struct SyncPtr {
int *ptr; int32_t *ptr;
} SyncPtr; } SyncPtr;
EXPORT void init_static_inner(const SyncPtr *s_ptr, int val) { EXPORT void init_static_inner(const SyncPtr *s_ptr, int32_t val) {
*(s_ptr->ptr) = val; *(s_ptr->ptr) = val;
} }
/* Tests: test_exposed, test_pass_dangling */ /* Tests: test_exposed, test_pass_dangling */
EXPORT void ignore_ptr(__attribute__((unused)) const int *ptr) { EXPORT void ignore_ptr(__attribute__((unused)) const int32_t *ptr) {
return; return;
} }
/* Test: test_expose_int */ /* Test: test_expose_int */
EXPORT void expose_int(const int *int_ptr, const int **pptr) { EXPORT void expose_int(const int32_t *int_ptr, const int32_t **pptr) {
*pptr = int_ptr; *pptr = int_ptr;
} }
/* Test: test_swap_ptr */ /* Test: test_swap_ptr */
EXPORT void swap_ptr(const int **pptr0, const int **pptr1) { EXPORT void swap_ptr(const int32_t **pptr0, const int32_t **pptr1) {
const int *tmp = *pptr0; const int32_t *tmp = *pptr0;
*pptr0 = *pptr1; *pptr0 = *pptr1;
*pptr1 = tmp; *pptr1 = tmp;
} }
@ -55,36 +56,54 @@ EXPORT void swap_ptr(const int **pptr0, const int **pptr1) {
/* Test: test_swap_ptr_tuple */ /* Test: test_swap_ptr_tuple */
typedef struct Tuple { typedef struct Tuple {
int *ptr0; int32_t *ptr0;
int *ptr1; int32_t *ptr1;
} Tuple; } Tuple;
EXPORT void swap_ptr_tuple(Tuple *t_ptr) { EXPORT void swap_ptr_tuple(Tuple *t_ptr) {
int *tmp = t_ptr->ptr0; int32_t *tmp = t_ptr->ptr0;
t_ptr->ptr0 = t_ptr->ptr1; t_ptr->ptr0 = t_ptr->ptr1;
t_ptr->ptr1 = tmp; t_ptr->ptr1 = tmp;
} }
/* Test: test_overwrite_dangling */ /* Test: test_overwrite_dangling */
EXPORT void overwrite_ptr(const int **pptr) { EXPORT void overwrite_ptr(const int32_t **pptr) {
*pptr = NULL; *pptr = NULL;
} }
/* Test: test_swap_ptr_triple_dangling */ /* Test: test_swap_ptr_triple_dangling */
typedef struct Triple { typedef struct Triple {
int *ptr0; int32_t *ptr0;
int *ptr1; int32_t *ptr1;
int *ptr2; int32_t *ptr2;
} Triple; } Triple;
EXPORT void swap_ptr_triple_dangling(Triple *t_ptr) { EXPORT void swap_ptr_triple_dangling(Triple *t_ptr) {
int *tmp = t_ptr->ptr0; int32_t *tmp = t_ptr->ptr0;
t_ptr->ptr0 = t_ptr->ptr2; t_ptr->ptr0 = t_ptr->ptr2;
t_ptr->ptr2 = tmp; t_ptr->ptr2 = tmp;
} }
EXPORT const int *return_ptr(const int *ptr) { EXPORT const int32_t *return_ptr(const int32_t *ptr) {
return ptr; return ptr;
} }
/* Test: test_pass_ptr_as_int */
EXPORT void pass_ptr_as_int(uintptr_t ptr, int32_t set_to_val) {
*(int32_t*)ptr = set_to_val;
}
/* Test: test_pass_ptr_via_previously_shared_mem */
int32_t** shared_place;
EXPORT void set_shared_mem(int32_t** ptr) {
shared_place = ptr;
}
EXPORT void init_ptr_stored_in_shared_mem(int32_t val) {
**shared_place = val;
}


@ -1,9 +1,10 @@
#include <stdio.h> #include <stdio.h>
#include <stdint.h>
// See comments in build_native_lib() // See comments in build_native_lib()
#define EXPORT __attribute__((visibility("default"))) #define EXPORT __attribute__((visibility("default")))
EXPORT int add_one_int(int x) { EXPORT int32_t add_one_int(int32_t x) {
return 2 + x; return 2 + x;
} }
@ -13,23 +14,23 @@ EXPORT void printer(void) {
// function with many arguments, to test functionality when some args are stored // function with many arguments, to test functionality when some args are stored
// on the stack // on the stack
EXPORT int test_stack_spill(int a, int b, int c, int d, int e, int f, int g, int h, int i, int j, int k, int l) { EXPORT int32_t test_stack_spill(int32_t a, int32_t b, int32_t c, int32_t d, int32_t e, int32_t f, int32_t g, int32_t h, int32_t i, int32_t j, int32_t k, int32_t l) {
return a+b+c+d+e+f+g+h+i+j+k+l; return a+b+c+d+e+f+g+h+i+j+k+l;
} }
EXPORT unsigned int get_unsigned_int(void) { EXPORT uint32_t get_unsigned_int(void) {
return -10; return -10;
} }
EXPORT short add_int16(short x) { EXPORT short add_int16(int16_t x) {
return x + 3; return x + 3;
} }
EXPORT long add_short_to_long(short x, long y) { EXPORT long add_short_to_long(int16_t x, int64_t y) {
return x + y; return x + y;
} }
// To test that functions not marked with EXPORT cannot be called by Miri. // To test that functions not marked with EXPORT cannot be called by Miri.
int not_exported(void) { int32_t not_exported(void) {
return 0; return 0;
} }


@ -2359,6 +2359,21 @@ impl Rewrite for ast::Param {
} }
} }
fn rewrite_opt_lifetime(
context: &RewriteContext<'_>,
lifetime: Option<ast::Lifetime>,
) -> RewriteResult {
let Some(l) = lifetime else {
return Ok(String::new());
};
let mut result = l.rewrite_result(
context,
Shape::legacy(context.config.max_width(), Indent::empty()),
)?;
result.push(' ');
Ok(result)
}
fn rewrite_explicit_self( fn rewrite_explicit_self(
context: &RewriteContext<'_>, context: &RewriteContext<'_>,
explicit_self: &ast::ExplicitSelf, explicit_self: &ast::ExplicitSelf,
@ -2367,58 +2382,34 @@ fn rewrite_explicit_self(
shape: Shape, shape: Shape,
has_multiple_attr_lines: bool, has_multiple_attr_lines: bool,
) -> RewriteResult { ) -> RewriteResult {
match explicit_self.node { let self_str = match explicit_self.node {
ast::SelfKind::Region(lt, m) => { ast::SelfKind::Region(lt, m) => {
let mut_str = format_mutability(m); let mut_str = format_mutability(m);
match lt { let lifetime_str = rewrite_opt_lifetime(context, lt)?;
Some(ref l) => { format!("&{lifetime_str}{mut_str}self")
let lifetime_str = l.rewrite_result(
context,
Shape::legacy(context.config.max_width(), Indent::empty()),
)?;
Ok(combine_strs_with_missing_comments(
context,
param_attrs,
&format!("&{lifetime_str} {mut_str}self"),
span,
shape,
!has_multiple_attr_lines,
)?)
}
None => Ok(combine_strs_with_missing_comments(
context,
param_attrs,
&format!("&{mut_str}self"),
span,
shape,
!has_multiple_attr_lines,
)?),
} }
ast::SelfKind::Pinned(lt, m) => {
let mut_str = m.ptr_str();
let lifetime_str = rewrite_opt_lifetime(context, lt)?;
format!("&{lifetime_str}pin {mut_str} self")
} }
ast::SelfKind::Explicit(ref ty, mutability) => { ast::SelfKind::Explicit(ref ty, mutability) => {
let type_str = ty.rewrite_result( let type_str = ty.rewrite_result(
context, context,
Shape::legacy(context.config.max_width(), Indent::empty()), Shape::legacy(context.config.max_width(), Indent::empty()),
)?; )?;
format!("{}self: {}", format_mutability(mutability), type_str)
}
ast::SelfKind::Value(mutability) => format!("{}self", format_mutability(mutability)),
};
Ok(combine_strs_with_missing_comments( Ok(combine_strs_with_missing_comments(
context, context,
param_attrs, param_attrs,
&format!("{}self: {}", format_mutability(mutability), type_str), &self_str,
span, span,
shape, shape,
!has_multiple_attr_lines, !has_multiple_attr_lines,
)?) )?)
}
ast::SelfKind::Value(mutability) => Ok(combine_strs_with_missing_comments(
context,
param_attrs,
&format!("{}self", format_mutability(mutability)),
span,
shape,
!has_multiple_attr_lines,
)?),
}
} }
pub(crate) fn span_lo_for_param(param: &ast::Param) -> BytePos { pub(crate) fn span_lo_for_param(param: &ast::Param) -> BytePos {


@ -8,3 +8,13 @@ fn g<'a>(x: & 'a pin const i32) {}
fn h<'a>(x: & 'a pin fn h<'a>(x: & 'a pin
mut i32) {} mut i32) {}
fn i(x: &pin mut i32) {} fn i(x: &pin mut i32) {}
struct Foo;
impl Foo {
fn f(&pin const self) {}
fn g<'a>(& 'a pin const self) {}
fn h<'a>(& 'a pin
mut self) {}
fn i(&pin mut self) {}
}


@ -7,3 +7,12 @@ fn f(x: &pin const i32) {}
fn g<'a>(x: &'a pin const i32) {} fn g<'a>(x: &'a pin const i32) {}
fn h<'a>(x: &'a pin mut i32) {} fn h<'a>(x: &'a pin mut i32) {}
fn i(x: &pin mut i32) {} fn i(x: &pin mut i32) {}
struct Foo;
impl Foo {
fn f(&pin const self) {}
fn g<'a>(&'a pin const self) {}
fn h<'a>(&'a pin mut self) {}
fn i(&pin mut self) {}
}


@ -16,7 +16,7 @@ pub fn bar() {
// CHECK: call pattern_type_symbols::foo::<u32> // CHECK: call pattern_type_symbols::foo::<u32>
// CHECK: call void @_RINvC[[CRATE_IDENT:[a-zA-Z0-9]{12}]]_20pattern_type_symbols3foomEB2_ // CHECK: call void @_RINvC[[CRATE_IDENT:[a-zA-Z0-9]{12}]]_20pattern_type_symbols3foomEB2_
foo::<u32>(); foo::<u32>();
// CHECK: call pattern_type_symbols::foo::<(u32, [(); 0], [(); 999999999], [(); true])> // CHECK: call pattern_type_symbols::foo::<(u32, [(); 0], [(); 999999999])>
// CHECK: call void @_RINvC[[CRATE_IDENT]]_20pattern_type_symbols3fooTmAum0_Aum3b9ac9ff_Aub1_EEB2_ // CHECK: call void @_RINvC[[CRATE_IDENT]]_20pattern_type_symbols3fooTmAum0_Aum3b9ac9ff_EEB2_
foo::<NanoU32>(); foo::<NanoU32>();
} }


@ -3,9 +3,9 @@
fn main() -> () { fn main() -> () {
let mut _0: (); let mut _0: ();
scope 1 { scope 1 {
debug x => const 2_u32 is 1..=; debug x => const 2_u32 is 1..;
scope 2 { scope 2 {
debug y => const {transmute(0x00000000): (u32) is 1..=}; debug y => const {transmute(0x00000000): (u32) is 1..};
} }
} }


@ -5,8 +5,8 @@ use std::pat::pattern_type;
// EMIT_MIR pattern_types.main.PreCodegen.after.mir // EMIT_MIR pattern_types.main.PreCodegen.after.mir
fn main() { fn main() {
// CHECK: debug x => const 2_u32 is 1..= // CHECK: debug x => const 2_u32 is 1..
let x: pattern_type!(u32 is 1..) = unsafe { std::mem::transmute(2) }; let x: pattern_type!(u32 is 1..) = unsafe { std::mem::transmute(2) };
// CHECK: debug y => const {transmute(0x00000000): (u32) is 1..=} // CHECK: debug y => const {transmute(0x00000000): (u32) is 1..}
let y: pattern_type!(u32 is 1..) = unsafe { std::mem::transmute(0) }; let y: pattern_type!(u32 is 1..) = unsafe { std::mem::transmute(0) };
} }


@ -0,0 +1,24 @@
//@ pp-exact
#![feature(pin_ergonomics)]
#![allow(dead_code, incomplete_features)]
struct Foo;
impl Foo {
fn baz(&pin mut self) {}
fn baz_const(&pin const self) {}
fn baz_lt<'a>(&'a pin mut self) {}
fn baz_const_lt(&'_ pin const self) {}
}
fn foo(_: &pin mut Foo) {}
fn foo_lt<'a>(_: &'a pin mut Foo) {}
fn foo_const(_: &pin const Foo) {}
fn foo_const_lt(_: &'_ pin const Foo) {}
fn main() {}
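For readers unfamiliar with pinned receivers, here is a stable-Rust sketch of the kind of method the `&pin mut self` sugar formats; treating it as shorthand for a `Pin<&mut Self>` receiver is an assumption here, based on the surrounding `pin_ergonomics` tests:

```rust
use std::pin::Pin;

struct Foo;

impl Foo {
    // Stable spelling of a pinned mutable receiver; `&pin mut self` is assumed
    // to be sugar for a receiver of this shape.
    fn baz(self: Pin<&mut Self>) {}
}

fn main() {
    let mut foo = Foo;
    // `Foo` is `Unpin`, so `Pin::new` suffices to obtain a pinned reference.
    Pin::new(&mut foo).baz();
}
```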


@ -19,8 +19,33 @@ pub enum I128Enum {
I128D = i128::MAX.to_le(), I128D = i128::MAX.to_le(),
} }
#[cfg(not(old_llvm))]
#[repr(u128)]
pub enum U128VariantEnum {
VariantU128A(u8) = 0_u128.to_le(),
VariantU128B = 1_u128.to_le(),
VariantU128C = (u64::MAX as u128 + 1).to_le(),
VariantU128D = u128::MAX.to_le(),
}
#[cfg(not(old_llvm))]
#[repr(i128)]
pub enum I128VariantEnum {
VariantI128A(u8) = 0_i128.to_le(),
VariantI128B = (-1_i128).to_le(),
VariantI128C = i128::MIN.to_le(),
VariantI128D = i128::MAX.to_le(),
}
pub fn f(_: U128Enum, _: I128Enum) {} pub fn f(_: U128Enum, _: I128Enum) {}
#[cfg(not(old_llvm))]
pub fn g(_: U128VariantEnum, _: I128VariantEnum) {}
fn main() { fn main() {
f(U128Enum::U128A, I128Enum::I128A); f(U128Enum::U128A, I128Enum::I128A);
#[cfg(not(old_llvm))]
{
g(U128VariantEnum::VariantU128A(1), I128VariantEnum::VariantI128A(2));
}
} }


@ -5,13 +5,32 @@ use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use std::rc::Rc; use std::rc::Rc;
use gimli::read::DebuggingInformationEntry;
use gimli::{AttributeValue, EndianRcSlice, Reader, RunTimeEndian}; use gimli::{AttributeValue, EndianRcSlice, Reader, RunTimeEndian};
use object::{Object, ObjectSection}; use object::{Object, ObjectSection};
use run_make_support::{gimli, object, rfs, rustc}; use run_make_support::{gimli, object, rfs, rustc};
fn main() { fn main() {
// Before LLVM 20, 128-bit enums with variants didn't emit debuginfo correctly.
// This check can be removed once Rust no longer supports LLVM 18 and 19.
let llvm_version = rustc()
.verbose()
.arg("--version")
.run()
.stdout_utf8()
.lines()
.filter_map(|line| line.strip_prefix("LLVM version: "))
.map(|version| version.split(".").next().unwrap().parse::<u32>().unwrap())
.next()
.unwrap();
let is_old_llvm = llvm_version < 20;
let output = PathBuf::from("repr128"); let output = PathBuf::from("repr128");
rustc().input("main.rs").output(&output).arg("-Cdebuginfo=2").run(); let mut rustc = rustc();
if is_old_llvm {
rustc.cfg("old_llvm");
}
rustc.input("main.rs").output(&output).arg("-Cdebuginfo=2").run();
// Mach-O uses packed debug info // Mach-O uses packed debug info
let dsym_location = output let dsym_location = output
.with_extension("dSYM") .with_extension("dSYM")
@ -29,7 +48,8 @@ fn main() {
}) })
.unwrap(); .unwrap();
let mut iter = dwarf.units(); let mut iter = dwarf.units();
let mut still_to_find = HashMap::from([
let mut enumerators_to_find = HashMap::from([
("U128A", 0_u128), ("U128A", 0_u128),
("U128B", 1_u128), ("U128B", 1_u128),
("U128C", u64::MAX as u128 + 1), ("U128C", u64::MAX as u128 + 1),
@ -39,22 +59,69 @@ fn main() {
("I128C", i128::MIN as u128), ("I128C", i128::MIN as u128),
("I128D", i128::MAX as u128), ("I128D", i128::MAX as u128),
]); ]);
let mut variants_to_find = HashMap::from([
("VariantU128A", 0_u128),
("VariantU128B", 1_u128),
("VariantU128C", u64::MAX as u128 + 1),
("VariantU128D", u128::MAX),
("VariantI128A", 0_i128 as u128),
("VariantI128B", (-1_i128) as u128),
("VariantI128C", i128::MIN as u128),
("VariantI128D", i128::MAX as u128),
]);
while let Some(header) = iter.next().unwrap() { while let Some(header) = iter.next().unwrap() {
let unit = dwarf.unit(header).unwrap(); let unit = dwarf.unit(header).unwrap();
let mut cursor = unit.entries(); let mut cursor = unit.entries();
while let Some((_, entry)) = cursor.next_dfs().unwrap() {
if entry.tag() == gimli::constants::DW_TAG_enumerator { let get_name = |entry: &DebuggingInformationEntry<'_, '_, _>| {
let name = dwarf let name = dwarf
.attr_string( .attr_string(
&unit, &unit,
entry.attr(gimli::constants::DW_AT_name).unwrap().unwrap().value(), entry.attr(gimli::constants::DW_AT_name).unwrap().unwrap().value(),
) )
.unwrap(); .unwrap();
let name = name.to_string().unwrap(); name.to_string().unwrap().to_string()
if let Some(expected) = still_to_find.remove(name.as_ref()) { };
match entry.attr(gimli::constants::DW_AT_const_value).unwrap().unwrap().value()
while let Some((_, entry)) = cursor.next_dfs().unwrap() {
match entry.tag() {
gimli::constants::DW_TAG_variant if !is_old_llvm => {
let value = match entry
.attr(gimli::constants::DW_AT_discr_value)
.unwrap()
.unwrap()
.value()
{
AttributeValue::Block(value) => value.to_slice().unwrap().to_vec(),
value => panic!("unexpected DW_AT_discr_value of {value:?}"),
};
// The `DW_TAG_member` that is a child of `DW_TAG_variant` will contain the
// variant's name.
let Some((1, child_entry)) = cursor.next_dfs().unwrap() else {
panic!("Missing child of DW_TAG_variant");
};
assert_eq!(child_entry.tag(), gimli::constants::DW_TAG_member);
let name = get_name(child_entry);
if let Some(expected) = variants_to_find.remove(name.as_str()) {
// This test uses LE byte order for consistent values across
// architectures.
assert_eq!(value.as_slice(), expected.to_le_bytes().as_slice(), "{name}");
}
}
gimli::constants::DW_TAG_enumerator => {
let name = get_name(entry);
if let Some(expected) = enumerators_to_find.remove(name.as_str()) {
match entry
.attr(gimli::constants::DW_AT_const_value)
.unwrap()
.unwrap()
.value()
{ {
AttributeValue::Block(value) => { AttributeValue::Block(value) => {
// This test uses LE byte order for consistent values across
// architectures.
assert_eq!( assert_eq!(
value.to_slice().unwrap(), value.to_slice().unwrap(),
expected.to_le_bytes().as_slice(), expected.to_le_bytes().as_slice(),
@ -65,9 +132,15 @@ fn main() {
} }
} }
} }
_ => {}
} }
} }
if !still_to_find.is_empty() { }
panic!("Didn't find debug entries for {still_to_find:?}"); if !enumerators_to_find.is_empty() {
panic!("Didn't find debug enumerator entries for {enumerators_to_find:?}");
}
if !is_old_llvm && !variants_to_find.is_empty() {
panic!("Didn't find debug variant entries for {variants_to_find:?}");
} }
} }


@ -26,21 +26,21 @@ pub mod single_reexport {
//@ has 'foo/single_reexport/index.html' //@ has 'foo/single_reexport/index.html'
// First we check that we have 4 type aliases. // First we check that we have 4 type aliases.
//@ count - '//*[@id="main-content"]/*[@class="item-table reexports"]//code' 4 //@ count - '//*[@id="main-content"]/*[@class="item-table reexports"]//code' 0
// Then we check that we have the correct link for each re-export. // Then we check that we have the correct link for each re-export.
//@ !has - '//*[@href="struct.Foo.html"]' 'Foo' //@ !has - '//*[@href="struct.Foo.html"]' 'Foo'
//@ has - '//*[@id="reexport.Foo"]/code' 'pub use crate::private_module::Public as Foo;' //@ !has - '//*[@id="reexport.Foo"]/code' 'pub use crate::private_module::Public as Foo;'
pub use crate::private_module::Public as Foo; pub use crate::private_module::Public as Foo;
//@ !has - '//*[@href="type.Foo2.html"]' 'Foo2' //@ !has - '//*[@href="type.Foo2.html"]' 'Foo2'
//@ has - '//*[@id="reexport.Foo2"]/code' 'pub use crate::private_module::Bar as Foo2;' //@ !has - '//*[@id="reexport.Foo2"]/code' 'pub use crate::private_module::Bar as Foo2;'
pub use crate::private_module::Bar as Foo2; pub use crate::private_module::Bar as Foo2;
//@ !has - '//*[@href="type.Yo.html"]' 'Yo' //@ !has - '//*[@href="type.Yo.html"]' 'Yo'
//@ has - '//*[@id="reexport.Yo"]/code' 'pub use crate::Bar3 as Yo;' //@ !has - '//*[@id="reexport.Yo"]/code' 'pub use crate::Bar3 as Yo;'
pub use crate::Bar3 as Yo; pub use crate::Bar3 as Yo;
//@ !has - '//*[@href="struct.Yo2.html"]' 'Yo2' //@ !has - '//*[@href="struct.Yo2.html"]' 'Yo2'
//@ has - '//*[@id="reexport.Yo2"]/code' 'pub use crate::FooFoo as Yo2;' //@ !has - '//*[@id="reexport.Yo2"]/code' 'pub use crate::FooFoo as Yo2;'
pub use crate::FooFoo as Yo2; pub use crate::FooFoo as Yo2;
// Checking that each file is also created as expected. // Checking that each file is also created as expected.
@ -70,19 +70,19 @@ pub mod single_reexport_no_inline {
//@ has - '//*[@id="main-content"]/*[@class="section-header"]' 'Re-exports' //@ has - '//*[@id="main-content"]/*[@class="section-header"]' 'Re-exports'
// Now we check that we don't have links to the items, just `pub use`. // Now we check that we don't have links to the items, just `pub use`.
//@ has - '//*[@id="main-content"]//*' 'pub use crate::private_module::Public as XFoo;' //@ !has - '//*[@id="main-content"]//*' 'pub use crate::private_module::Public as XFoo;'
//@ !has - '//*[@id="main-content"]//a' 'XFoo' //@ !has - '//*[@id="main-content"]//a' 'XFoo'
#[doc(no_inline)] #[doc(no_inline)]
pub use crate::private_module::Public as XFoo; pub use crate::private_module::Public as XFoo;
//@ has - '//*[@id="main-content"]//*' 'pub use crate::private_module::Bar as Foo2;' //@ !has - '//*[@id="main-content"]//*' 'pub use crate::private_module::Bar as Foo2;'
//@ !has - '//*[@id="main-content"]//a' 'Foo2' //@ !has - '//*[@id="main-content"]//a' 'Foo2'
#[doc(no_inline)] #[doc(no_inline)]
pub use crate::private_module::Bar as Foo2; pub use crate::private_module::Bar as Foo2;
//@ has - '//*[@id="main-content"]//*' 'pub use crate::Bar3 as Yo;' //@ !has - '//*[@id="main-content"]//*' 'pub use crate::Bar3 as Yo;'
//@ !has - '//*[@id="main-content"]//a' 'Yo' //@ !has - '//*[@id="main-content"]//a' 'Yo'
#[doc(no_inline)] #[doc(no_inline)]
pub use crate::Bar3 as Yo; pub use crate::Bar3 as Yo;
//@ has - '//*[@id="main-content"]//*' 'pub use crate::FooFoo as Yo2;' //@ !has - '//*[@id="main-content"]//*' 'pub use crate::FooFoo as Yo2;'
//@ !has - '//*[@id="main-content"]//a' 'Yo2' //@ !has - '//*[@id="main-content"]//a' 'Yo2'
#[doc(no_inline)] #[doc(no_inline)]
pub use crate::FooFoo as Yo2; pub use crate::FooFoo as Yo2;


@ -0,0 +1,16 @@
// Test for <https://github.com/rust-lang/rust/issues/137342>.
#![crate_name = "foo"]
//@ has 'foo/index.html'
//@ !has - '//*[@id="main-content"]//*[@class="struct"]' 'Bar'
#[doc(hidden)]
pub struct Bar;
//@ !has - '//*' 'pub use crate::Bar as A;'
pub use crate::Bar as A;
//@ !has - '//*' 'pub use crate::A as B;'
pub use crate::A as B;
//@ has - '//dt/a[@class="struct"]' 'C'
#[doc(inline)]
pub use crate::Bar as C;


@ -5,7 +5,7 @@
extern crate rustdoc_impl_parts_crosscrate; extern crate rustdoc_impl_parts_crosscrate;
pub struct Bar<T> { t: T } pub struct Bar<T: Copy + Send> { t: T }
// The output file is html embedded in javascript, so the html tags // The output file is html embedded in javascript, so the html tags
// aren't stripped by the processing script and we can't check for the // aren't stripped by the processing script and we can't check for the


@ -3,7 +3,7 @@
pub auto trait AnAutoTrait {} pub auto trait AnAutoTrait {}
pub struct Foo<T> { field: T } pub struct Foo<T: Clone + Sync> { field: T }
//@ has impl_parts/struct.Foo.html '//*[@class="impl"]//h3[@class="code-header"]' \ //@ has impl_parts/struct.Foo.html '//*[@class="impl"]//h3[@class="code-header"]' \
// "impl<T> !AnAutoTrait for Foo<T>where T: Sync + Clone," // "impl<T> !AnAutoTrait for Foo<T>where T: Sync + Clone,"


@ -6,7 +6,7 @@ extern crate rustdoc_hidden;
//@ has inline_hidden/index.html //@ has inline_hidden/index.html
// Ensures this item is not inlined. // Ensures this item is not inlined.
//@ has - '//*[@id="reexport.Foo"]/code' 'pub use rustdoc_hidden::Foo;' //@ !has - '//*[@id="reexport.Foo"]/code' 'pub use rustdoc_hidden::Foo;'
#[doc(no_inline)] #[doc(no_inline)]
pub use rustdoc_hidden::Foo; pub use rustdoc_hidden::Foo;
@ -16,7 +16,7 @@ pub use rustdoc_hidden::Foo;
pub use rustdoc_hidden::Foo as Inlined; pub use rustdoc_hidden::Foo as Inlined;
// Even with this import, we should not see `Foo`. // Even with this import, we should not see `Foo`.
//@ count - '//dt' 4 //@ count - '//dt' 3
//@ has - '//dt/a[@class="struct"]' 'Bar' //@ has - '//dt/a[@class="struct"]' 'Bar'
//@ has - '//dt/a[@class="fn"]' 'foo' //@ has - '//dt/a[@class="fn"]' 'foo'
pub use rustdoc_hidden::*; pub use rustdoc_hidden::*;


@ -18,7 +18,7 @@ pub use Foo1 as Foo2;
// First we ensure that only the reexport `Bar2` and the inlined struct `Bar` // First we ensure that only the reexport `Bar2` and the inlined struct `Bar`
// are inlined. // are inlined.
//@ count - '//a[@class="struct"]' 2 //@ count - '//a[@class="struct"]' 1
// Then we check that `cfg` is displayed for base item, but not for intermediate re-exports. // Then we check that `cfg` is displayed for base item, but not for intermediate re-exports.
//@ has - '//*[@class="stab portability"]' 'foo' //@ has - '//*[@class="stab portability"]' 'foo'
//@ !has - '//*[@class="stab portability"]' 'bar' //@ !has - '//*[@class="stab portability"]' 'bar'
@ -29,5 +29,5 @@ pub use Foo2 as Bar;
// This one should appear but `Bar2` won't be linked because there is no // This one should appear but `Bar2` won't be linked because there is no
// `#[doc(inline)]`. // `#[doc(inline)]`.
//@ has - '//*[@id="reexport.Bar2"]' 'pub use Foo2 as Bar2;' //@ !has - '//*[@id="reexport.Bar2"]' 'pub use Foo2 as Bar2;'
pub use Foo2 as Bar2; pub use Foo2 as Bar2;


@ -9,8 +9,8 @@ mod private_module {
} }
//@ has 'foo/index.html' //@ has 'foo/index.html'
//@ has - '//*[@id="reexport.Foo"]/code' 'pub use crate::private_module::Public as Foo;' //@ !has - '//*[@id="reexport.Foo"]/code' 'pub use crate::private_module::Public as Foo;'
pub use crate::private_module::Public as Foo; pub use crate::private_module::Public as Foo;
// Glob re-exports with no visible items should not be displayed. // Glob re-exports with no visible items should not be displayed.
//@ count - '//*[@class="item-table reexports"]/dt' 1 //@ count - '//*[@class="item-table reexports"]/dt' 0
pub use crate::private_module::*; pub use crate::private_module::*;


@ -8,7 +8,7 @@
pub type Type = u32; pub type Type = u32;
//@ has 'foo/index.html' //@ has 'foo/index.html'
//@ has - '//*[@id="reexport.Type2"]/code' 'pub use crate::Type as Type2;' //@ !has - '//*[@id="reexport.Type2"]/code' 'pub use crate::Type as Type2;'
pub use crate::Type as Type2; pub use crate::Type as Type2;
//@ count - '//*[@id="reexport.Type3"]' 0 //@ count - '//*[@id="reexport.Type3"]' 0
@ -21,5 +21,5 @@ macro_rules! foo {
() => {}; () => {};
} }
//@ has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' //@ !has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;'
pub use crate::foo as Macro; pub use crate::foo as Macro;


@ -5,7 +5,7 @@
//@ has 'foo/index.html' //@ has 'foo/index.html'
//@ has - '//*[@id="main-content"]//a[@href="macro.Macro2.html"]' 'Macro2' //@ has - '//*[@id="main-content"]//a[@href="macro.Macro2.html"]' 'Macro2'
//@ has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;' //@ !has - '//*[@id="reexport.Macro"]/code' 'pub use crate::foo as Macro;'
//@ has 'foo/macro.Macro2.html' //@ has 'foo/macro.Macro2.html'
//@ has - '//*[@class="docblock"]' 'Displayed' //@ has - '//*[@class="docblock"]' 'Displayed'

Some files were not shown because too many files have changed in this diff.