1
Fork 0

Merge branch 'master' into is-symlink-stabilization

This commit is contained in:
Max Wase 2021-10-13 01:33:12 +03:00 committed by GitHub
commit 3e0360f3d4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
365 changed files with 4893 additions and 3106 deletions

View file

@ -89,6 +89,15 @@ gimli.debug = 0
miniz_oxide.debug = 0 miniz_oxide.debug = 0
object.debug = 0 object.debug = 0
# The only package that ever uses debug builds is bootstrap.
# We care a lot about bootstrap's compile times, so don't include debug info for
# dependencies, only bootstrap itself.
[profile.dev]
debug = 0
[profile.dev.package]
# Only use debuginfo=1 to further reduce compile times.
bootstrap.debug = 1
# We want the RLS to use the version of Cargo that we've got vendored in this # We want the RLS to use the version of Cargo that we've got vendored in this
# repository to ensure that the same exact version of Cargo is used by both the # repository to ensure that the same exact version of Cargo is used by both the
# RLS and the Cargo binary itself. The RLS depends on Cargo as a git repository # RLS and the Cargo binary itself. The RLS depends on Cargo as a git repository

View file

@ -64,7 +64,6 @@ Stabilised APIs
- [`VecDeque::shrink_to`] - [`VecDeque::shrink_to`]
- [`HashMap::shrink_to`] - [`HashMap::shrink_to`]
- [`HashSet::shrink_to`] - [`HashSet::shrink_to`]
- [`task::ready!`]
These APIs are now usable in const contexts: These APIs are now usable in const contexts:
@ -128,7 +127,6 @@ and related tools.
[`VecDeque::shrink_to`]: https://doc.rust-lang.org/stable/std/collections/struct.VecDeque.html#method.shrink_to [`VecDeque::shrink_to`]: https://doc.rust-lang.org/stable/std/collections/struct.VecDeque.html#method.shrink_to
[`HashMap::shrink_to`]: https://doc.rust-lang.org/stable/std/collections/hash_map/struct.HashMap.html#method.shrink_to [`HashMap::shrink_to`]: https://doc.rust-lang.org/stable/std/collections/hash_map/struct.HashMap.html#method.shrink_to
[`HashSet::shrink_to`]: https://doc.rust-lang.org/stable/std/collections/hash_set/struct.HashSet.html#method.shrink_to [`HashSet::shrink_to`]: https://doc.rust-lang.org/stable/std/collections/hash_set/struct.HashSet.html#method.shrink_to
[`task::ready!`]: https://doc.rust-lang.org/stable/std/task/macro.ready.html
[`std::mem::transmute`]: https://doc.rust-lang.org/stable/std/mem/fn.transmute.html [`std::mem::transmute`]: https://doc.rust-lang.org/stable/std/mem/fn.transmute.html
[`slice::first`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.first [`slice::first`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.first
[`slice::split_first`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.split_first [`slice::split_first`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.split_first

View file

@ -389,6 +389,7 @@ impl<S: Semantics> fmt::Display for IeeeFloat<S> {
let _: Loss = sig::shift_right(&mut sig, &mut exp, trailing_zeros as usize); let _: Loss = sig::shift_right(&mut sig, &mut exp, trailing_zeros as usize);
// Change the exponent from 2^e to 10^e. // Change the exponent from 2^e to 10^e.
#[allow(clippy::comparison_chain)]
if exp == 0 { if exp == 0 {
// Nothing to do. // Nothing to do.
} else if exp > 0 { } else if exp > 0 {
@ -2526,6 +2527,7 @@ mod sig {
if *a_sign ^ b_sign { if *a_sign ^ b_sign {
let (reverse, loss); let (reverse, loss);
#[allow(clippy::comparison_chain)]
if bits == 0 { if bits == 0 {
reverse = cmp(a_sig, b_sig) == Ordering::Less; reverse = cmp(a_sig, b_sig) == Ordering::Less;
loss = Loss::ExactlyZero; loss = Loss::ExactlyZero;

View file

@ -20,16 +20,6 @@
#[macro_use] #[macro_use]
extern crate rustc_macros; extern crate rustc_macros;
#[macro_export]
macro_rules! unwrap_or {
($opt:expr, $default:expr) => {
match $opt {
Some(x) => x,
None => $default,
}
};
}
pub mod util { pub mod util {
pub mod classify; pub mod classify;
pub mod comments; pub mod comments;

View file

@ -202,39 +202,20 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let mut used_input_regs = FxHashMap::default(); let mut used_input_regs = FxHashMap::default();
let mut used_output_regs = FxHashMap::default(); let mut used_output_regs = FxHashMap::default();
let mut required_features: Vec<&str> = vec![];
for (idx, &(ref op, op_sp)) in operands.iter().enumerate() { for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
if let Some(reg) = op.reg() { if let Some(reg) = op.reg() {
// Make sure we don't accidentally carry features from the
// previous iteration.
required_features.clear();
let reg_class = reg.reg_class(); let reg_class = reg.reg_class();
if reg_class == asm::InlineAsmRegClass::Err { if reg_class == asm::InlineAsmRegClass::Err {
continue; continue;
} }
// We ignore target feature requirements for clobbers: if the
// feature is disabled then the compiler doesn't care what we
// do with the registers.
//
// Note that this is only possible for explicit register
// operands, which cannot be used in the asm string.
let is_clobber = matches!(
op,
hir::InlineAsmOperand::Out {
reg: asm::InlineAsmRegOrRegClass::Reg(_),
late: _,
expr: None
}
);
// Some register classes can only be used as clobbers. This // Some register classes can only be used as clobbers. This
// means that we disallow passing a value in/out of the asm and // means that we disallow passing a value in/out of the asm and
// require that the operand name an explicit register, not a // require that the operand name an explicit register, not a
// register class. // register class.
if reg_class.is_clobber_only(asm_arch.unwrap()) if reg_class.is_clobber_only(asm_arch.unwrap())
&& !(is_clobber && matches!(reg, asm::InlineAsmRegOrRegClass::Reg(_))) && !(op.is_clobber() && matches!(reg, asm::InlineAsmRegOrRegClass::Reg(_)))
{ {
let msg = format!( let msg = format!(
"register class `{}` can only be used as a clobber, \ "register class `{}` can only be used as a clobber, \
@ -245,47 +226,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
continue; continue;
} }
if !is_clobber {
// Validate register classes against currently enabled target
// features. We check that at least one type is available for
// the current target.
for &(_, feature) in reg_class.supported_types(asm_arch.unwrap()) {
if let Some(feature) = feature {
if self.sess.target_features.contains(&Symbol::intern(feature)) {
required_features.clear();
break;
} else {
required_features.push(feature);
}
} else {
required_features.clear();
break;
}
}
// We are sorting primitive strs here and can use unstable sort here
required_features.sort_unstable();
required_features.dedup();
match &required_features[..] {
[] => {}
[feature] => {
let msg = format!(
"register class `{}` requires the `{}` target feature",
reg_class.name(),
feature
);
sess.struct_span_err(op_sp, &msg).emit();
}
features => {
let msg = format!(
"register class `{}` requires at least one target feature: {}",
reg_class.name(),
features.join(", ")
);
sess.struct_span_err(op_sp, &msg).emit();
}
}
}
// Check for conflicts between explicit register operands. // Check for conflicts between explicit register operands.
if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg { if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
let (input, output) = match op { let (input, output) = match op {

View file

@ -1345,8 +1345,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
generics generics
.params .params
.iter() .iter()
.find(|p| def_id == self.resolver.local_def_id(p.id).to_def_id()) .any(|p| def_id == self.resolver.local_def_id(p.id).to_def_id())
.is_some()
} }
// Either the `bounded_ty` is not a plain type parameter, or // Either the `bounded_ty` is not a plain type parameter, or
// it's not found in the generic type parameters list. // it's not found in the generic type parameters list.

View file

@ -201,7 +201,7 @@ impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
let bb_data = &self.body[bb]; let bb_data = &self.body[bb];
debug_assert!(hi == bb_data.statements.len()); debug_assert!(hi == bb_data.statements.len());
for &succ_bb in bb_data.terminator().successors() { for &succ_bb in bb_data.terminator().successors() {
if self.visited.insert(succ_bb) == false { if !self.visited.insert(succ_bb) {
if succ_bb == location.block && first_lo > 0 { if succ_bb == location.block && first_lo > 0 {
// `succ_bb` has been seen before. If it wasn't // `succ_bb` has been seen before. If it wasn't
// fully processed, add its first part to `stack` // fully processed, add its first part to `stack`

View file

@ -972,8 +972,7 @@ fn suggest_ampmut<'tcx>(
if let Some(assignment_rhs_span) = opt_assignment_rhs_span { if let Some(assignment_rhs_span) = opt_assignment_rhs_span {
if let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span) { if let Ok(src) = tcx.sess.source_map().span_to_snippet(assignment_rhs_span) {
let is_mutbl = |ty: &str| -> bool { let is_mutbl = |ty: &str| -> bool {
if ty.starts_with("mut") { if let Some(rest) = ty.strip_prefix("mut") {
let rest = &ty[3..];
match rest.chars().next() { match rest.chars().next() {
// e.g. `&mut x` // e.g. `&mut x`
Some(c) if c.is_whitespace() => true, Some(c) if c.is_whitespace() => true,

View file

@ -1153,28 +1153,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
.convert_all(data); .convert_all(data);
} }
/// Convenient wrapper around `relate_tys::relate_types` -- see
/// that fn for docs.
fn relate_types(
&mut self,
a: Ty<'tcx>,
v: ty::Variance,
b: Ty<'tcx>,
locations: Locations,
category: ConstraintCategory,
) -> Fallible<()> {
relate_tys::relate_types(
self.infcx,
self.param_env,
a,
v,
b,
locations,
category,
self.borrowck_context,
)
}
/// Try to relate `sub <: sup` /// Try to relate `sub <: sup`
fn sub_types( fn sub_types(
&mut self, &mut self,

View file

@ -1,5 +1,5 @@
use rustc_infer::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate}; use rustc_infer::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate};
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin}; use rustc_infer::infer::NllRegionVariableOrigin;
use rustc_middle::mir::ConstraintCategory; use rustc_middle::mir::ConstraintCategory;
use rustc_middle::ty::relate::TypeRelation; use rustc_middle::ty::relate::TypeRelation;
use rustc_middle::ty::{self, Const, Ty}; use rustc_middle::ty::{self, Const, Ty};
@ -7,8 +7,9 @@ use rustc_trait_selection::traits::query::Fallible;
use crate::constraints::OutlivesConstraint; use crate::constraints::OutlivesConstraint;
use crate::diagnostics::UniverseInfo; use crate::diagnostics::UniverseInfo;
use crate::type_check::{BorrowCheckContext, Locations}; use crate::type_check::{Locations, TypeChecker};
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
/// Adds sufficient constraints to ensure that `a R b` where `R` depends on `v`: /// Adds sufficient constraints to ensure that `a R b` where `R` depends on `v`:
/// ///
/// - "Covariant" `a <: b` /// - "Covariant" `a <: b`
@ -17,38 +18,27 @@ use crate::type_check::{BorrowCheckContext, Locations};
/// ///
/// N.B., the type `a` is permitted to have unresolved inference /// N.B., the type `a` is permitted to have unresolved inference
/// variables, but not the type `b`. /// variables, but not the type `b`.
#[instrument(skip(infcx, param_env, borrowck_context), level = "debug")] #[instrument(skip(self), level = "debug")]
pub(super) fn relate_types<'tcx>( pub(super) fn relate_types(
infcx: &InferCtxt<'_, 'tcx>, &mut self,
param_env: ty::ParamEnv<'tcx>,
a: Ty<'tcx>, a: Ty<'tcx>,
v: ty::Variance, v: ty::Variance,
b: Ty<'tcx>, b: Ty<'tcx>,
locations: Locations, locations: Locations,
category: ConstraintCategory, category: ConstraintCategory,
borrowck_context: &mut BorrowCheckContext<'_, 'tcx>,
) -> Fallible<()> { ) -> Fallible<()> {
TypeRelating::new( TypeRelating::new(
infcx, self.infcx,
NllTypeRelatingDelegate::new( NllTypeRelatingDelegate::new(self, locations, category, UniverseInfo::relate(a, b)),
infcx,
borrowck_context,
param_env,
locations,
category,
UniverseInfo::relate(a, b),
),
v, v,
) )
.relate(a, b)?; .relate(a, b)?;
Ok(()) Ok(())
} }
}
struct NllTypeRelatingDelegate<'me, 'bccx, 'tcx> { struct NllTypeRelatingDelegate<'me, 'bccx, 'tcx> {
infcx: &'me InferCtxt<'me, 'tcx>, type_checker: &'me mut TypeChecker<'bccx, 'tcx>,
borrowck_context: &'me mut BorrowCheckContext<'bccx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
/// Where (and why) is this relation taking place? /// Where (and why) is this relation taking place?
locations: Locations, locations: Locations,
@ -63,25 +53,24 @@ struct NllTypeRelatingDelegate<'me, 'bccx, 'tcx> {
impl NllTypeRelatingDelegate<'me, 'bccx, 'tcx> { impl NllTypeRelatingDelegate<'me, 'bccx, 'tcx> {
fn new( fn new(
infcx: &'me InferCtxt<'me, 'tcx>, type_checker: &'me mut TypeChecker<'bccx, 'tcx>,
borrowck_context: &'me mut BorrowCheckContext<'bccx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
locations: Locations, locations: Locations,
category: ConstraintCategory, category: ConstraintCategory,
universe_info: UniverseInfo<'tcx>, universe_info: UniverseInfo<'tcx>,
) -> Self { ) -> Self {
Self { infcx, borrowck_context, param_env, locations, category, universe_info } Self { type_checker, locations, category, universe_info }
} }
} }
impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> { impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
fn param_env(&self) -> ty::ParamEnv<'tcx> { fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env self.type_checker.param_env
} }
fn create_next_universe(&mut self) -> ty::UniverseIndex { fn create_next_universe(&mut self) -> ty::UniverseIndex {
let universe = self.infcx.create_next_universe(); let universe = self.type_checker.infcx.create_next_universe();
self.borrowck_context self.type_checker
.borrowck_context
.constraints .constraints
.universe_causes .universe_causes
.insert(universe, self.universe_info.clone()); .insert(universe, self.universe_info.clone());
@ -90,15 +79,18 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> { fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> {
let origin = NllRegionVariableOrigin::Existential { from_forall }; let origin = NllRegionVariableOrigin::Existential { from_forall };
self.infcx.next_nll_region_var(origin) self.type_checker.infcx.next_nll_region_var(origin)
} }
fn next_placeholder_region(&mut self, placeholder: ty::PlaceholderRegion) -> ty::Region<'tcx> { fn next_placeholder_region(&mut self, placeholder: ty::PlaceholderRegion) -> ty::Region<'tcx> {
self.borrowck_context.constraints.placeholder_region(self.infcx, placeholder) self.type_checker
.borrowck_context
.constraints
.placeholder_region(self.type_checker.infcx, placeholder)
} }
fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> { fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> {
self.infcx.next_nll_region_var_in_universe( self.type_checker.infcx.next_nll_region_var_in_universe(
NllRegionVariableOrigin::Existential { from_forall: false }, NllRegionVariableOrigin::Existential { from_forall: false },
universe, universe,
) )
@ -110,15 +102,17 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
sub: ty::Region<'tcx>, sub: ty::Region<'tcx>,
info: ty::VarianceDiagInfo<'tcx>, info: ty::VarianceDiagInfo<'tcx>,
) { ) {
let sub = self.borrowck_context.universal_regions.to_region_vid(sub); let sub = self.type_checker.borrowck_context.universal_regions.to_region_vid(sub);
let sup = self.borrowck_context.universal_regions.to_region_vid(sup); let sup = self.type_checker.borrowck_context.universal_regions.to_region_vid(sup);
self.borrowck_context.constraints.outlives_constraints.push(OutlivesConstraint { self.type_checker.borrowck_context.constraints.outlives_constraints.push(
OutlivesConstraint {
sup, sup,
sub, sub,
locations: self.locations, locations: self.locations,
category: self.category, category: self.category,
variance_info: info, variance_info: info,
}); },
);
} }
// We don't have to worry about the equality of consts during borrow checking // We don't have to worry about the equality of consts during borrow checking

View file

@ -594,7 +594,7 @@ impl<'a> TraitDef<'a> {
GenericParamKind::Const { ty, kw_span, .. } => { GenericParamKind::Const { ty, kw_span, .. } => {
let const_nodefault_kind = GenericParamKind::Const { let const_nodefault_kind = GenericParamKind::Const {
ty: ty.clone(), ty: ty.clone(),
kw_span: kw_span.clone(), kw_span: *kw_span,
// We can't have default values inside impl block // We can't have default values inside impl block
default: None, default: None,

View file

@ -2,7 +2,7 @@ use gccjit::RValue;
use rustc_codegen_ssa::mir::debuginfo::{FunctionDebugContext, VariableKind}; use rustc_codegen_ssa::mir::debuginfo::{FunctionDebugContext, VariableKind};
use rustc_codegen_ssa::traits::{DebugInfoBuilderMethods, DebugInfoMethods}; use rustc_codegen_ssa::traits::{DebugInfoBuilderMethods, DebugInfoMethods};
use rustc_middle::mir; use rustc_middle::mir;
use rustc_middle::ty::{Instance, Ty}; use rustc_middle::ty::{Instance, PolyExistentialTraitRef, Ty};
use rustc_span::{SourceFile, Span, Symbol}; use rustc_span::{SourceFile, Span, Symbol};
use rustc_target::abi::Size; use rustc_target::abi::Size;
use rustc_target::abi::call::FnAbi; use rustc_target::abi::call::FnAbi;
@ -31,7 +31,7 @@ impl<'a, 'gcc, 'tcx> DebugInfoBuilderMethods for Builder<'a, 'gcc, 'tcx> {
} }
impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> { impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
fn create_vtable_metadata(&self, _ty: Ty<'tcx>, _vtable: Self::Value) { fn create_vtable_metadata(&self, _ty: Ty<'tcx>, _trait_ref: Option<PolyExistentialTraitRef<'tcx>>, _vtable: Self::Value) {
// TODO(antoyo) // TODO(antoyo)
} }

View file

@ -596,7 +596,7 @@ pub(crate) fn run_pass_manager(
// tools/lto/LTOCodeGenerator.cpp // tools/lto/LTOCodeGenerator.cpp
debug!("running the pass manager"); debug!("running the pass manager");
unsafe { unsafe {
if write::should_use_new_llvm_pass_manager(config) { if write::should_use_new_llvm_pass_manager(cgcx, config) {
let opt_stage = if thin { llvm::OptStage::ThinLTO } else { llvm::OptStage::FatLTO }; let opt_stage = if thin { llvm::OptStage::ThinLTO } else { llvm::OptStage::FatLTO };
let opt_level = config.opt_level.unwrap_or(config::OptLevel::No); let opt_level = config.opt_level.unwrap_or(config::OptLevel::No);
write::optimize_with_new_llvm_pass_manager( write::optimize_with_new_llvm_pass_manager(

View file

@ -377,10 +377,19 @@ fn get_pgo_sample_use_path(config: &ModuleConfig) -> Option<CString> {
.map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap()) .map(|path_buf| CString::new(path_buf.to_string_lossy().as_bytes()).unwrap())
} }
pub(crate) fn should_use_new_llvm_pass_manager(config: &ModuleConfig) -> bool { pub(crate) fn should_use_new_llvm_pass_manager(
cgcx: &CodegenContext<LlvmCodegenBackend>,
config: &ModuleConfig,
) -> bool {
// The new pass manager is enabled by default for LLVM >= 13. // The new pass manager is enabled by default for LLVM >= 13.
// This matches Clang, which also enables it since Clang 13. // This matches Clang, which also enables it since Clang 13.
config.new_llvm_pass_manager.unwrap_or_else(|| llvm_util::get_version() >= (13, 0, 0))
// FIXME: There are some perf issues with the new pass manager
// when targeting s390x, so it is temporarily disabled for that
// arch, see https://github.com/rust-lang/rust/issues/89609
config
.new_llvm_pass_manager
.unwrap_or_else(|| cgcx.target_arch != "s390x" && llvm_util::get_version() >= (13, 0, 0))
} }
pub(crate) unsafe fn optimize_with_new_llvm_pass_manager( pub(crate) unsafe fn optimize_with_new_llvm_pass_manager(
@ -482,7 +491,7 @@ pub(crate) unsafe fn optimize(
} }
if let Some(opt_level) = config.opt_level { if let Some(opt_level) = config.opt_level {
if should_use_new_llvm_pass_manager(config) { if should_use_new_llvm_pass_manager(cgcx, config) {
let opt_stage = match cgcx.lto { let opt_stage = match cgcx.lto {
Lto::Fat => llvm::OptStage::PreLinkFatLTO, Lto::Fat => llvm::OptStage::PreLinkFatLTO,
Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO, Lto::Thin | Lto::ThinLocal => llvm::OptStage::PreLinkThinLTO,

View file

@ -828,6 +828,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
} }
fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value { fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
let op = llvm::RealPredicate::from_generic(op);
unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) } unsafe { llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED) }
} }

View file

@ -175,7 +175,7 @@ pub fn get_fn(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>) -> &'ll Value
// should use dllimport for functions. // should use dllimport for functions.
if cx.use_dll_storage_attrs if cx.use_dll_storage_attrs
&& tcx.is_dllimport_foreign_item(instance_def_id) && tcx.is_dllimport_foreign_item(instance_def_id)
&& tcx.sess.target.env != "gnu" && !matches!(tcx.sess.target.env.as_ref(), "gnu" | "uclibc")
{ {
llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport); llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport);
} }

View file

@ -2,7 +2,7 @@ use self::MemberDescriptionFactory::*;
use self::RecursiveTypeDescription::*; use self::RecursiveTypeDescription::*;
use super::namespace::mangled_name_of_instance; use super::namespace::mangled_name_of_instance;
use super::type_names::compute_debuginfo_type_name; use super::type_names::{compute_debuginfo_type_name, compute_debuginfo_vtable_name};
use super::utils::{ use super::utils::{
create_DIArray, debug_context, get_namespace_for_item, is_node_local_to_unit, DIB, create_DIArray, debug_context, get_namespace_for_item, is_node_local_to_unit, DIB,
}; };
@ -29,8 +29,9 @@ use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::{self, GeneratorLayout}; use rustc_middle::mir::{self, GeneratorLayout};
use rustc_middle::ty::layout::{self, IntegerExt, LayoutOf, PrimitiveExt, TyAndLayout}; use rustc_middle::ty::layout::{self, IntegerExt, LayoutOf, PrimitiveExt, TyAndLayout};
use rustc_middle::ty::subst::GenericArgKind; use rustc_middle::ty::subst::GenericArgKind;
use rustc_middle::ty::Instance; use rustc_middle::ty::{
use rustc_middle::ty::{self, AdtKind, GeneratorSubsts, ParamEnv, Ty, TyCtxt}; self, AdtKind, GeneratorSubsts, Instance, ParamEnv, Ty, TyCtxt, COMMON_VTABLE_ENTRIES,
};
use rustc_middle::{bug, span_bug}; use rustc_middle::{bug, span_bug};
use rustc_query_system::ich::NodeIdHashingMode; use rustc_query_system::ich::NodeIdHashingMode;
use rustc_session::config::{self, DebugInfo}; use rustc_session::config::{self, DebugInfo};
@ -2591,11 +2592,45 @@ pub fn create_global_var_metadata(cx: &CodegenCx<'ll, '_>, def_id: DefId, global
} }
} }
/// Generates LLVM debuginfo for a vtable.
fn vtable_type_metadata(
cx: &CodegenCx<'ll, 'tcx>,
ty: Ty<'tcx>,
poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> &'ll DIType {
let tcx = cx.tcx;
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
let trait_ref = tcx.erase_regions(trait_ref);
tcx.vtable_entries(trait_ref)
} else {
COMMON_VTABLE_ENTRIES
};
// FIXME: We describe the vtable as an array of *const () pointers. The length of the array is
// correct - but we could create a more accurate description, e.g. by describing it
// as a struct where each field has a name that corresponds to the name of the method
// it points to.
// However, this is not entirely straightforward because there might be multiple
// methods with the same name if the vtable is for multiple traits. So for now we keep
// things simple instead of adding some ad-hoc disambiguation scheme.
let vtable_type = tcx.mk_array(tcx.mk_imm_ptr(tcx.types.unit), vtable_entries.len() as u64);
type_metadata(cx, vtable_type, rustc_span::DUMMY_SP)
}
/// Creates debug information for the given vtable, which is for the /// Creates debug information for the given vtable, which is for the
/// given type. /// given type.
/// ///
/// Adds the created metadata nodes directly to the crate's IR. /// Adds the created metadata nodes directly to the crate's IR.
pub fn create_vtable_metadata(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>, vtable: &'ll Value) { pub fn create_vtable_metadata(
cx: &CodegenCx<'ll, 'tcx>,
ty: Ty<'tcx>,
poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
vtable: &'ll Value,
) {
if cx.dbg_cx.is_none() { if cx.dbg_cx.is_none() {
return; return;
} }
@ -2605,42 +2640,16 @@ pub fn create_vtable_metadata(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>, vtable: &
return; return;
} }
let type_metadata = type_metadata(cx, ty, rustc_span::DUMMY_SP); let vtable_name = compute_debuginfo_vtable_name(cx.tcx, ty, poly_trait_ref);
let vtable_type = vtable_type_metadata(cx, ty, poly_trait_ref);
unsafe { unsafe {
// `LLVMRustDIBuilderCreateStructType()` wants an empty array. A null
// pointer will lead to hard to trace and debug LLVM assertions
// later on in `llvm/lib/IR/Value.cpp`.
let empty_array = create_DIArray(DIB(cx), &[]);
let name = "vtable";
// Create a new one each time. We don't want metadata caching
// here, because each vtable will refer to a unique containing
// type.
let vtable_type = llvm::LLVMRustDIBuilderCreateStructType(
DIB(cx),
NO_SCOPE_METADATA,
name.as_ptr().cast(),
name.len(),
unknown_file_metadata(cx),
UNKNOWN_LINE_NUMBER,
Size::ZERO.bits(),
cx.tcx.data_layout.pointer_align.abi.bits() as u32,
DIFlags::FlagArtificial,
None,
empty_array,
0,
Some(type_metadata),
name.as_ptr().cast(),
name.len(),
);
let linkage_name = ""; let linkage_name = "";
llvm::LLVMRustDIBuilderCreateStaticVariable( llvm::LLVMRustDIBuilderCreateStaticVariable(
DIB(cx), DIB(cx),
NO_SCOPE_METADATA, NO_SCOPE_METADATA,
name.as_ptr().cast(), vtable_name.as_ptr().cast(),
name.len(), vtable_name.len(),
linkage_name.as_ptr().cast(), linkage_name.as_ptr().cast(),
linkage_name.len(), linkage_name.len(),
unknown_file_metadata(cx), unknown_file_metadata(cx),

View file

@ -550,8 +550,13 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
unsafe { llvm::LLVMRustDIBuilderCreateDebugLocation(line, col, scope, inlined_at) } unsafe { llvm::LLVMRustDIBuilderCreateDebugLocation(line, col, scope, inlined_at) }
} }
fn create_vtable_metadata(&self, ty: Ty<'tcx>, vtable: Self::Value) { fn create_vtable_metadata(
metadata::create_vtable_metadata(self, ty, vtable) &self,
ty: Ty<'tcx>,
trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
vtable: Self::Value,
) {
metadata::create_vtable_metadata(self, ty, trait_ref, vtable)
} }
fn extend_scope_to_file( fn extend_scope_to_file(

View file

@ -223,6 +223,33 @@ pub enum RealPredicate {
RealPredicateTrue = 15, RealPredicateTrue = 15,
} }
impl RealPredicate {
pub fn from_generic(realp: rustc_codegen_ssa::common::RealPredicate) -> Self {
match realp {
rustc_codegen_ssa::common::RealPredicate::RealPredicateFalse => {
RealPredicate::RealPredicateFalse
}
rustc_codegen_ssa::common::RealPredicate::RealOEQ => RealPredicate::RealOEQ,
rustc_codegen_ssa::common::RealPredicate::RealOGT => RealPredicate::RealOGT,
rustc_codegen_ssa::common::RealPredicate::RealOGE => RealPredicate::RealOGE,
rustc_codegen_ssa::common::RealPredicate::RealOLT => RealPredicate::RealOLT,
rustc_codegen_ssa::common::RealPredicate::RealOLE => RealPredicate::RealOLE,
rustc_codegen_ssa::common::RealPredicate::RealONE => RealPredicate::RealONE,
rustc_codegen_ssa::common::RealPredicate::RealORD => RealPredicate::RealORD,
rustc_codegen_ssa::common::RealPredicate::RealUNO => RealPredicate::RealUNO,
rustc_codegen_ssa::common::RealPredicate::RealUEQ => RealPredicate::RealUEQ,
rustc_codegen_ssa::common::RealPredicate::RealUGT => RealPredicate::RealUGT,
rustc_codegen_ssa::common::RealPredicate::RealUGE => RealPredicate::RealUGE,
rustc_codegen_ssa::common::RealPredicate::RealULT => RealPredicate::RealULT,
rustc_codegen_ssa::common::RealPredicate::RealULE => RealPredicate::RealULE,
rustc_codegen_ssa::common::RealPredicate::RealUNE => RealPredicate::RealUNE,
rustc_codegen_ssa::common::RealPredicate::RealPredicateTrue => {
RealPredicate::RealPredicateTrue
}
}
}
}
/// LLVMTypeKind /// LLVMTypeKind
#[derive(Copy, Clone, PartialEq, Debug)] #[derive(Copy, Clone, PartialEq, Debug)]
#[repr(C)] #[repr(C)]

View file

@ -843,19 +843,18 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
let msg_bus = "clang: error: unable to execute command: Bus error: 10"; let msg_bus = "clang: error: unable to execute command: Bus error: 10";
if out.contains(msg_segv) || out.contains(msg_bus) { if out.contains(msg_segv) || out.contains(msg_bus) {
warn!( warn!(
?cmd, %out,
"looks like the linker segfaulted when we tried to call it, \ "looks like the linker segfaulted when we tried to call it, \
automatically retrying again. cmd = {:?}, out = {}.", automatically retrying again",
cmd, out,
); );
continue; continue;
} }
if is_illegal_instruction(&output.status) { if is_illegal_instruction(&output.status) {
warn!( warn!(
?cmd, %out, status = %output.status,
"looks like the linker hit an illegal instruction when we \ "looks like the linker hit an illegal instruction when we \
tried to call it, automatically retrying again. cmd = {:?}, ]\ tried to call it, automatically retrying again.",
out = {}, status = {}.",
cmd, out, output.status,
); );
continue; continue;
} }

View file

@ -446,6 +446,62 @@ fn push_debuginfo_type_name<'tcx>(
} }
} }
/// Computes a name for the global variable storing a vtable.
///
/// The name is of the form:
///
/// `<path::to::SomeType as path::to::SomeTrait>::{vtable}`
///
/// or, when generating C++-like names:
///
/// `impl$<path::to::SomeType, path::to::SomeTrait>::vtable$`
pub fn compute_debuginfo_vtable_name<'tcx>(
tcx: TyCtxt<'tcx>,
t: Ty<'tcx>,
trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> String {
let cpp_like_names = cpp_like_names(tcx);
let mut vtable_name = String::with_capacity(64);
if cpp_like_names {
vtable_name.push_str("impl$<");
} else {
vtable_name.push('<');
}
let mut visited = FxHashSet::default();
push_debuginfo_type_name(tcx, t, true, &mut vtable_name, &mut visited);
if cpp_like_names {
vtable_name.push_str(", ");
} else {
vtable_name.push_str(" as ");
}
if let Some(trait_ref) = trait_ref {
push_item_name(tcx, trait_ref.skip_binder().def_id, true, &mut vtable_name);
visited.clear();
push_generic_params_internal(
tcx,
trait_ref.skip_binder().substs,
&mut vtable_name,
&mut visited,
);
} else {
vtable_name.push_str("_");
}
push_close_angle_bracket(cpp_like_names, &mut vtable_name);
let suffix = if cpp_like_names { "::vtable$" } else { "::{vtable}" };
vtable_name.reserve_exact(suffix.len());
vtable_name.push_str(suffix);
vtable_name
}
pub fn push_item_name(tcx: TyCtxt<'tcx>, def_id: DefId, qualified: bool, output: &mut String) { pub fn push_item_name(tcx: TyCtxt<'tcx>, def_id: DefId, qualified: bool, output: &mut String) {
let def_key = tcx.def_key(def_id); let def_key = tcx.def_key(def_id);
if qualified { if qualified {

View file

@ -78,7 +78,7 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
let align = cx.data_layout().pointer_align.abi; let align = cx.data_layout().pointer_align.abi;
let vtable = cx.static_addr_of(vtable_const, align, Some("vtable")); let vtable = cx.static_addr_of(vtable_const, align, Some("vtable"));
cx.create_vtable_metadata(ty, vtable); cx.create_vtable_metadata(ty, trait_ref, vtable);
cx.vtables().borrow_mut().insert((ty, trait_ref), vtable); cx.vtables().borrow_mut().insert((ty, trait_ref), vtable);
vtable vtable
} }

View file

@ -1,13 +1,18 @@
use super::BackendTypes; use super::BackendTypes;
use crate::mir::debuginfo::{FunctionDebugContext, VariableKind}; use crate::mir::debuginfo::{FunctionDebugContext, VariableKind};
use rustc_middle::mir; use rustc_middle::mir;
use rustc_middle::ty::{Instance, Ty}; use rustc_middle::ty::{Instance, PolyExistentialTraitRef, Ty};
use rustc_span::{SourceFile, Span, Symbol}; use rustc_span::{SourceFile, Span, Symbol};
use rustc_target::abi::call::FnAbi; use rustc_target::abi::call::FnAbi;
use rustc_target::abi::Size; use rustc_target::abi::Size;
pub trait DebugInfoMethods<'tcx>: BackendTypes { pub trait DebugInfoMethods<'tcx>: BackendTypes {
fn create_vtable_metadata(&self, ty: Ty<'tcx>, vtable: Self::Value); fn create_vtable_metadata(
&self,
ty: Ty<'tcx>,
trait_ref: Option<PolyExistentialTraitRef<'tcx>>,
vtable: Self::Value,
);
/// Creates the function-specific debug context. /// Creates the function-specific debug context.
/// ///

View file

@ -14,7 +14,7 @@ const BASE_64: &[u8; MAX_BASE as usize] =
#[inline] #[inline]
pub fn push_str(mut n: u128, base: usize, output: &mut String) { pub fn push_str(mut n: u128, base: usize, output: &mut String) {
debug_assert!(base >= 2 && base <= MAX_BASE); debug_assert!((2..=MAX_BASE).contains(&base));
let mut s = [0u8; 128]; let mut s = [0u8; 128];
let mut index = 0; let mut index = 0;

View file

@ -206,17 +206,11 @@ impl<N: Debug, E: Debug> Graph<N, E> {
AdjacentEdges { graph: self, direction, next: first_edge } AdjacentEdges { graph: self, direction, next: first_edge }
} }
pub fn successor_nodes<'a>( pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
&'a self,
source: NodeIndex,
) -> impl Iterator<Item = NodeIndex> + 'a {
self.outgoing_edges(source).targets() self.outgoing_edges(source).targets()
} }
pub fn predecessor_nodes<'a>( pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
&'a self,
target: NodeIndex,
) -> impl Iterator<Item = NodeIndex> + 'a {
self.incoming_edges(target).sources() self.incoming_edges(target).sources()
} }

View file

@ -48,7 +48,7 @@ fn post_order_walk<G: DirectedGraph + WithSuccessors + WithNumNodes>(
let node = frame.node; let node = frame.node;
visited[node] = true; visited[node] = true;
while let Some(successor) = frame.iter.next() { for successor in frame.iter.by_ref() {
if !visited[successor] { if !visited[successor] {
stack.push(PostOrderFrame { node: successor, iter: graph.successors(successor) }); stack.push(PostOrderFrame { node: successor, iter: graph.successors(successor) });
continue 'recurse; continue 'recurse;
@ -112,7 +112,7 @@ where
/// This is equivalent to just invoke `next` repeatedly until /// This is equivalent to just invoke `next` repeatedly until
/// you get a `None` result. /// you get a `None` result.
pub fn complete_search(&mut self) { pub fn complete_search(&mut self) {
while let Some(_) = self.next() {} for _ in self {}
} }
/// Returns true if node has been visited thus far. /// Returns true if node has been visited thus far.

View file

@ -390,7 +390,7 @@ impl<O: ForestObligation> ObligationForest<O> {
.map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) }) .map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) })
.collect(); .collect();
self.compress(|_| assert!(false)); self.compress(|_| unreachable!());
errors errors
} }
@ -612,7 +612,7 @@ impl<O: ForestObligation> ObligationForest<O> {
fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) { fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
let orig_nodes_len = self.nodes.len(); let orig_nodes_len = self.nodes.len();
let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec); let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec);
debug_assert!(node_rewrites.is_empty()); assert!(node_rewrites.is_empty());
node_rewrites.extend(0..orig_nodes_len); node_rewrites.extend(0..orig_nodes_len);
let mut dead_nodes = 0; let mut dead_nodes = 0;
@ -623,13 +623,13 @@ impl<O: ForestObligation> ObligationForest<O> {
// self.nodes[0..index - dead_nodes] are the first remaining nodes // self.nodes[0..index - dead_nodes] are the first remaining nodes
// self.nodes[index - dead_nodes..index] are all dead // self.nodes[index - dead_nodes..index] are all dead
// self.nodes[index..] are unchanged // self.nodes[index..] are unchanged
for index in 0..orig_nodes_len { for (index, node_rewrite) in node_rewrites.iter_mut().enumerate() {
let node = &self.nodes[index]; let node = &self.nodes[index];
match node.state.get() { match node.state.get() {
NodeState::Pending | NodeState::Waiting => { NodeState::Pending | NodeState::Waiting => {
if dead_nodes > 0 { if dead_nodes > 0 {
self.nodes.swap(index, index - dead_nodes); self.nodes.swap(index, index - dead_nodes);
node_rewrites[index] -= dead_nodes; *node_rewrite -= dead_nodes;
} }
} }
NodeState::Done => { NodeState::Done => {
@ -646,7 +646,7 @@ impl<O: ForestObligation> ObligationForest<O> {
} }
// Extract the success stories. // Extract the success stories.
outcome_cb(&node.obligation); outcome_cb(&node.obligation);
node_rewrites[index] = orig_nodes_len; *node_rewrite = orig_nodes_len;
dead_nodes += 1; dead_nodes += 1;
} }
NodeState::Error => { NodeState::Error => {
@ -655,7 +655,7 @@ impl<O: ForestObligation> ObligationForest<O> {
// check against. // check against.
self.active_cache.remove(&node.obligation.as_cache_key()); self.active_cache.remove(&node.obligation.as_cache_key());
self.insert_into_error_cache(index); self.insert_into_error_cache(index);
node_rewrites[index] = orig_nodes_len; *node_rewrite = orig_nodes_len;
dead_nodes += 1; dead_nodes += 1;
} }
NodeState::Success => unreachable!(), NodeState::Success => unreachable!(),

View file

@ -205,10 +205,10 @@ impl<K: Ord, V> SortedMap<K, V> {
R: RangeBounds<K>, R: RangeBounds<K>,
{ {
let start = match range.start_bound() { let start = match range.start_bound() {
Bound::Included(ref k) => match self.lookup_index_for(k) { Bound::Included(k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index, Ok(index) | Err(index) => index,
}, },
Bound::Excluded(ref k) => match self.lookup_index_for(k) { Bound::Excluded(k) => match self.lookup_index_for(k) {
Ok(index) => index + 1, Ok(index) => index + 1,
Err(index) => index, Err(index) => index,
}, },
@ -216,11 +216,11 @@ impl<K: Ord, V> SortedMap<K, V> {
}; };
let end = match range.end_bound() { let end = match range.end_bound() {
Bound::Included(ref k) => match self.lookup_index_for(k) { Bound::Included(k) => match self.lookup_index_for(k) {
Ok(index) => index + 1, Ok(index) => index + 1,
Err(index) => index, Err(index) => index,
}, },
Bound::Excluded(ref k) => match self.lookup_index_for(k) { Bound::Excluded(k) => match self.lookup_index_for(k) {
Ok(index) | Err(index) => index, Ok(index) | Err(index) => index,
}, },
Bound::Unbounded => self.data.len(), Bound::Unbounded => self.data.len(),

View file

@ -75,7 +75,7 @@ impl<I: Idx, K: Ord, V> SortedIndexMultiMap<I, K, V> {
/// ///
/// If there are multiple items that are equivalent to `key`, they will be yielded in /// If there are multiple items that are equivalent to `key`, they will be yielded in
/// insertion order. /// insertion order.
pub fn get_by_key(&'a self, key: K) -> impl 'a + Iterator<Item = &'a V> { pub fn get_by_key(&self, key: K) -> impl Iterator<Item = &V> {
self.get_by_key_enumerated(key).map(|(_, v)| v) self.get_by_key_enumerated(key).map(|(_, v)| v)
} }
@ -84,7 +84,7 @@ impl<I: Idx, K: Ord, V> SortedIndexMultiMap<I, K, V> {
/// ///
/// If there are multiple items that are equivalent to `key`, they will be yielded in /// If there are multiple items that are equivalent to `key`, they will be yielded in
/// insertion order. /// insertion order.
pub fn get_by_key_enumerated(&'a self, key: K) -> impl '_ + Iterator<Item = (I, &V)> { pub fn get_by_key_enumerated(&self, key: K) -> impl Iterator<Item = (I, &V)> {
let lower_bound = self.idx_sorted_by_item_key.partition_point(|&i| self.items[i].0 < key); let lower_bound = self.idx_sorted_by_item_key.partition_point(|&i| self.items[i].0 < key);
self.idx_sorted_by_item_key[lower_bound..].iter().map_while(move |&i| { self.idx_sorted_by_item_key[lower_bound..].iter().map_while(move |&i| {
let (k, v) = &self.items[i]; let (k, v) = &self.items[i];

View file

@ -257,11 +257,7 @@ impl<K: Eq + Hash, V> SsoHashMap<K, V> {
pub fn remove(&mut self, key: &K) -> Option<V> { pub fn remove(&mut self, key: &K) -> Option<V> {
match self { match self {
SsoHashMap::Array(array) => { SsoHashMap::Array(array) => {
if let Some(index) = array.iter().position(|(k, _v)| k == key) { array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index).1)
Some(array.swap_remove(index).1)
} else {
None
}
} }
SsoHashMap::Map(map) => map.remove(key), SsoHashMap::Map(map) => map.remove(key),
} }
@ -272,11 +268,7 @@ impl<K: Eq + Hash, V> SsoHashMap<K, V> {
pub fn remove_entry(&mut self, key: &K) -> Option<(K, V)> { pub fn remove_entry(&mut self, key: &K) -> Option<(K, V)> {
match self { match self {
SsoHashMap::Array(array) => { SsoHashMap::Array(array) => {
if let Some(index) = array.iter().position(|(k, _v)| k == key) { array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index))
Some(array.swap_remove(index))
} else {
None
}
} }
SsoHashMap::Map(map) => map.remove_entry(key), SsoHashMap::Map(map) => map.remove_entry(key),
} }
@ -423,14 +415,14 @@ impl<K, V> IntoIterator for SsoHashMap<K, V> {
/// adapts Item of array reference iterator to Item of hashmap reference iterator. /// adapts Item of array reference iterator to Item of hashmap reference iterator.
#[inline(always)] #[inline(always)]
fn adapt_array_ref_it<K, V>(pair: &'a (K, V)) -> (&'a K, &'a V) { fn adapt_array_ref_it<K, V>(pair: &(K, V)) -> (&K, &V) {
let (a, b) = pair; let (a, b) = pair;
(a, b) (a, b)
} }
/// adapts Item of array mut reference iterator to Item of hashmap mut reference iterator. /// adapts Item of array mut reference iterator to Item of hashmap mut reference iterator.
#[inline(always)] #[inline(always)]
fn adapt_array_mut_it<K, V>(pair: &'a mut (K, V)) -> (&'a K, &'a mut V) { fn adapt_array_mut_it<K, V>(pair: &mut (K, V)) -> (&K, &mut V) {
let (a, b) = pair; let (a, b) = pair;
(a, b) (a, b)
} }

View file

@ -75,7 +75,7 @@ impl<T> SsoHashSet<T> {
/// An iterator visiting all elements in arbitrary order. /// An iterator visiting all elements in arbitrary order.
/// The iterator element type is `&'a T`. /// The iterator element type is `&'a T`.
#[inline] #[inline]
pub fn iter(&'a self) -> impl Iterator<Item = &'a T> { pub fn iter(&self) -> impl Iterator<Item = &T> {
self.into_iter() self.into_iter()
} }

View file

@ -229,14 +229,14 @@ impl<CTX> HashStable<CTX> for ::std::num::NonZeroUsize {
impl<CTX> HashStable<CTX> for f32 { impl<CTX> HashStable<CTX> for f32 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
let val: u32 = unsafe { ::std::mem::transmute(*self) }; let val: u32 = self.to_bits();
val.hash_stable(ctx, hasher); val.hash_stable(ctx, hasher);
} }
} }
impl<CTX> HashStable<CTX> for f64 { impl<CTX> HashStable<CTX> for f64 {
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
let val: u64 = unsafe { ::std::mem::transmute(*self) }; let val: u64 = self.to_bits();
val.hash_stable(ctx, hasher); val.hash_stable(ctx, hasher);
} }
} }

View file

@ -5,6 +5,7 @@ const RED_ZONE: usize = 100 * 1024; // 100k
// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then // Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
// on. This flag has performance relevant characteristics. Don't set it too high. // on. This flag has performance relevant characteristics. Don't set it too high.
#[allow(clippy::identity_op)]
const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations /// Grows the stack on demand to prevent stack overflow. Call this in strategic locations

View file

@ -34,7 +34,7 @@ impl<T> Steal<T> {
#[track_caller] #[track_caller]
pub fn borrow(&self) -> MappedReadGuard<'_, T> { pub fn borrow(&self) -> MappedReadGuard<'_, T> {
let borrow = self.value.borrow(); let borrow = self.value.borrow();
if let None = &*borrow { if borrow.is_none() {
panic!("attempted to read from stolen value: {}", std::any::type_name::<T>()); panic!("attempted to read from stolen value: {}", std::any::type_name::<T>());
} }
ReadGuard::map(borrow, |opt| opt.as_ref().unwrap()) ReadGuard::map(borrow, |opt| opt.as_ref().unwrap())

View file

@ -48,7 +48,7 @@ impl<T: PartialEq> TinyList<T> {
#[inline] #[inline]
pub fn contains(&self, data: &T) -> bool { pub fn contains(&self, data: &T) -> bool {
let mut elem = self.head.as_ref(); let mut elem = self.head.as_ref();
while let Some(ref e) = elem { while let Some(e) = elem {
if &e.data == data { if &e.data == data {
return true; return true;
} }

View file

@ -2,8 +2,8 @@ use rustc_index::vec::{Idx, IndexVec};
pub fn iter<Ls>( pub fn iter<Ls>(
first: Option<Ls::LinkIndex>, first: Option<Ls::LinkIndex>,
links: &'a Ls, links: &Ls,
) -> impl Iterator<Item = Ls::LinkIndex> + 'a ) -> impl Iterator<Item = Ls::LinkIndex> + '_
where where
Ls: Links, Ls: Links,
{ {

View file

@ -1253,12 +1253,16 @@ pub fn init_rustc_env_logger() {
/// tracing crate version. In contrast to `init_rustc_env_logger` it allows you to choose an env var /// tracing crate version. In contrast to `init_rustc_env_logger` it allows you to choose an env var
/// other than `RUSTC_LOG`. /// other than `RUSTC_LOG`.
pub fn init_env_logger(env: &str) { pub fn init_env_logger(env: &str) {
// Don't register a dispatcher if there's no filter to print anything use tracing_subscriber::{
match std::env::var(env) { filter::{self, EnvFilter, LevelFilter},
Err(_) => return, layer::SubscriberExt,
Ok(s) if s.is_empty() => return, };
Ok(_) => {}
} let filter = match std::env::var(env) {
Ok(env) => EnvFilter::new(env),
_ => EnvFilter::default().add_directive(filter::Directive::from(LevelFilter::WARN)),
};
let color_logs = match std::env::var(String::from(env) + "_COLOR") { let color_logs = match std::env::var(String::from(env) + "_COLOR") {
Ok(value) => match value.as_ref() { Ok(value) => match value.as_ref() {
"always" => true, "always" => true,
@ -1278,7 +1282,7 @@ pub fn init_env_logger(env: &str) {
"non-Unicode log color value: expected one of always, never, or auto", "non-Unicode log color value: expected one of always, never, or auto",
), ),
}; };
let filter = tracing_subscriber::EnvFilter::from_env(env);
let layer = tracing_tree::HierarchicalLayer::default() let layer = tracing_tree::HierarchicalLayer::default()
.with_writer(io::stderr) .with_writer(io::stderr)
.with_indent_lines(true) .with_indent_lines(true)
@ -1288,7 +1292,6 @@ pub fn init_env_logger(env: &str) {
#[cfg(parallel_compiler)] #[cfg(parallel_compiler)]
let layer = layer.with_thread_ids(true).with_thread_names(true); let layer = layer.with_thread_ids(true).with_thread_names(true);
use tracing_subscriber::layer::SubscriberExt;
let subscriber = tracing_subscriber::Registry::default().with(filter).with(layer); let subscriber = tracing_subscriber::Registry::default().with(filter).with(layer);
tracing::subscriber::set_global_default(subscriber).unwrap(); tracing::subscriber::set_global_default(subscriber).unwrap();
} }

View file

@ -242,6 +242,7 @@ E0468: include_str!("./error_codes/E0468.md"),
E0469: include_str!("./error_codes/E0469.md"), E0469: include_str!("./error_codes/E0469.md"),
E0477: include_str!("./error_codes/E0477.md"), E0477: include_str!("./error_codes/E0477.md"),
E0478: include_str!("./error_codes/E0478.md"), E0478: include_str!("./error_codes/E0478.md"),
E0482: include_str!("./error_codes/E0482.md"),
E0491: include_str!("./error_codes/E0491.md"), E0491: include_str!("./error_codes/E0491.md"),
E0492: include_str!("./error_codes/E0492.md"), E0492: include_str!("./error_codes/E0492.md"),
E0493: include_str!("./error_codes/E0493.md"), E0493: include_str!("./error_codes/E0493.md"),
@ -599,7 +600,6 @@ E0785: include_str!("./error_codes/E0785.md"),
// E0479, // the type `..` (provided as the value of a type parameter) is... // E0479, // the type `..` (provided as the value of a type parameter) is...
// E0480, // lifetime of method receiver does not outlive the method call // E0480, // lifetime of method receiver does not outlive the method call
// E0481, // lifetime of function argument does not outlive the function call // E0481, // lifetime of function argument does not outlive the function call
E0482, // lifetime of return value does not outlive the function call
// E0483, // lifetime of operand does not outlive the operation // E0483, // lifetime of operand does not outlive the operation
// E0484, // reference is not valid at the time of borrow // E0484, // reference is not valid at the time of borrow
// E0485, // automatically reference is not valid at the time of borrow // E0485, // automatically reference is not valid at the time of borrow

View file

@ -0,0 +1,73 @@
A lifetime of a returned value does not outlive the function call.
Erroneous code example:
```compile_fail,E0482
fn prefix<'a>(
words: impl Iterator<Item = &'a str>
) -> impl Iterator<Item = String> { // error!
words.map(|v| format!("foo-{}", v))
}
```
To fix this error, make the lifetime of the returned value explicit:
```
fn prefix<'a>(
words: impl Iterator<Item = &'a str> + 'a
) -> impl Iterator<Item = String> + 'a { // ok!
words.map(|v| format!("foo-{}", v))
}
```
The [`impl Trait`] feature in this example uses an implicit `'static` lifetime
restriction in the returned type. However the type implementing the `Iterator`
passed to the function lives just as long as `'a`, which is not long enough.
The solution involves adding a lifetime bound to both the function argument
and the return value, to make sure that the values inside the iterator
are not dropped when the function goes out of scope.
An alternative solution would be to guarantee that the `Item` references
in the iterator are alive for the whole lifetime of the program.
```
fn prefix(
words: impl Iterator<Item = &'static str>
) -> impl Iterator<Item = String> { // ok!
words.map(|v| format!("foo-{}", v))
}
```
A similar lifetime problem might arise when returning closures:
```compile_fail,E0482
fn foo(
x: &mut Vec<i32>
) -> impl FnMut(&mut Vec<i32>) -> &[i32] { // error!
|y| {
y.append(x);
y
}
}
```
Analogously, a solution here is to use an explicit return lifetime
and move ownership of the variable into the closure.
```
fn foo<'a>(
x: &'a mut Vec<i32>
) -> impl FnMut(&mut Vec<i32>) -> &[i32] + 'a { // ok!
move |y| {
y.append(x);
y
}
}
```
To better understand how lifetimes are treated with [`impl Trait`],
please see [RFC 1951].
[`impl Trait`]: https://doc.rust-lang.org/reference/types/impl-trait.html
[RFC 1951]: https://rust-lang.github.io/rfcs/1951-expand-impl-trait.html

View file

@ -2308,7 +2308,7 @@ pub fn is_case_difference(sm: &SourceMap, suggested: &str, sp: Span) -> bool {
let found = match sm.span_to_snippet(sp) { let found = match sm.span_to_snippet(sp) {
Ok(snippet) => snippet, Ok(snippet) => snippet,
Err(e) => { Err(e) => {
warn!("Invalid span {:?}. Err={:?}", sp, e); warn!(error = ?e, "Invalid span {:?}", sp);
return false; return false;
} }
}; };

View file

@ -288,7 +288,7 @@ declare_features! (
(accepted, member_constraints, "1.54.0", Some(61997), None), (accepted, member_constraints, "1.54.0", Some(61997), None),
/// Allows bindings in the subpattern of a binding pattern. /// Allows bindings in the subpattern of a binding pattern.
/// For example, you can write `x @ Some(y)`. /// For example, you can write `x @ Some(y)`.
(accepted, bindings_after_at, "1.54.0", Some(65490), None), (accepted, bindings_after_at, "1.56.0", Some(65490), None),
/// Allows calling `transmute` in const fn /// Allows calling `transmute` in const fn
(accepted, const_fn_transmute, "1.56.0", Some(53605), None), (accepted, const_fn_transmute, "1.56.0", Some(53605), None),
/// Allows accessing fields of unions inside `const` functions. /// Allows accessing fields of unions inside `const` functions.

View file

@ -678,6 +678,9 @@ declare_features! (
/// Allows `#[doc(cfg_hide(...))]`. /// Allows `#[doc(cfg_hide(...))]`.
(active, doc_cfg_hide, "1.57.0", Some(43781), None), (active, doc_cfg_hide, "1.57.0", Some(43781), None),
/// Allows using the `non_exhaustive_omitted_patterns` lint.
(active, non_exhaustive_omitted_patterns_lint, "1.57.0", Some(89554), None),
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// feature-group-end: actual feature gates // feature-group-end: actual feature gates
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------

View file

@ -512,7 +512,7 @@ impl<'a> LabelText<'a> {
pub fn to_dot_string(&self) -> String { pub fn to_dot_string(&self) -> String {
match *self { match *self {
LabelStr(ref s) => format!("\"{}\"", s.escape_default()), LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)), EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(s)),
HtmlStr(ref s) => format!("<{}>", s), HtmlStr(ref s) => format!("<{}>", s),
} }
} }

View file

@ -2293,6 +2293,13 @@ impl<'hir> InlineAsmOperand<'hir> {
Self::Const { .. } | Self::Sym { .. } => None, Self::Const { .. } | Self::Sym { .. } => None,
} }
} }
pub fn is_clobber(&self) -> bool {
matches!(
self,
InlineAsmOperand::Out { reg: InlineAsmRegOrRegClass::Reg(_), late: _, expr: None }
)
}
} }
#[derive(Debug, HashStable_Generic)] #[derive(Debug, HashStable_Generic)]

View file

@ -990,9 +990,8 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> {
pub fn insert_all_into_row(&mut self, row: R) { pub fn insert_all_into_row(&mut self, row: R) {
assert!(row.index() < self.num_rows); assert!(row.index() < self.num_rows);
let (start, end) = self.range(row); let (start, end) = self.range(row);
let words = &mut self.words[..]; for word in self.words[start..end].iter_mut() {
for index in start..end { *word = !0;
words[index] = !0;
} }
self.clear_excess_bits(row); self.clear_excess_bits(row);
} }
@ -1144,7 +1143,7 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
/// Iterates through all the columns set to true in a given row of /// Iterates through all the columns set to true in a given row of
/// the matrix. /// the matrix.
pub fn iter<'a>(&'a self, row: R) -> impl Iterator<Item = C> + 'a { pub fn iter(&self, row: R) -> impl Iterator<Item = C> + '_ {
self.row(row).into_iter().flat_map(|r| r.iter()) self.row(row).into_iter().flat_map(|r| r.iter())
} }

View file

@ -634,18 +634,15 @@ impl<I: Idx, T> IndexVec<I, T> {
} }
#[inline] #[inline]
pub fn drain<'a, R: RangeBounds<usize>>( pub fn drain<R: RangeBounds<usize>>(&mut self, range: R) -> impl Iterator<Item = T> + '_ {
&'a mut self,
range: R,
) -> impl Iterator<Item = T> + 'a {
self.raw.drain(range) self.raw.drain(range)
} }
#[inline] #[inline]
pub fn drain_enumerated<'a, R: RangeBounds<usize>>( pub fn drain_enumerated<R: RangeBounds<usize>>(
&'a mut self, &mut self,
range: R, range: R,
) -> impl Iterator<Item = (I, T)> + 'a { ) -> impl Iterator<Item = (I, T)> + '_ {
self.raw.drain(range).enumerate().map(|(n, t)| (I::new(n), t)) self.raw.drain(range).enumerate().map(|(n, t)| (I::new(n), t))
} }
@ -741,6 +738,12 @@ impl<I: Idx, T> IndexVec<I, Option<T>> {
self.ensure_contains_elem(index, || None); self.ensure_contains_elem(index, || None);
self[index].get_or_insert_with(value) self[index].get_or_insert_with(value)
} }
#[inline]
pub fn remove(&mut self, index: I) -> Option<T> {
self.ensure_contains_elem(index, || None);
self[index].take()
}
} }
impl<I: Idx, T: Clone> IndexVec<I, T> { impl<I: Idx, T: Clone> IndexVec<I, T> {

View file

@ -2060,14 +2060,24 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
expected: exp_found.expected.print_only_trait_path(), expected: exp_found.expected.print_only_trait_path(),
found: exp_found.found.print_only_trait_path(), found: exp_found.found.print_only_trait_path(),
}; };
self.expected_found_str(pretty_exp_found) match self.expected_found_str(pretty_exp_found) {
Some((expected, found)) if expected == found => {
self.expected_found_str(exp_found)
}
ret => ret,
}
} }
infer::PolyTraitRefs(exp_found) => { infer::PolyTraitRefs(exp_found) => {
let pretty_exp_found = ty::error::ExpectedFound { let pretty_exp_found = ty::error::ExpectedFound {
expected: exp_found.expected.print_only_trait_path(), expected: exp_found.expected.print_only_trait_path(),
found: exp_found.found.print_only_trait_path(), found: exp_found.found.print_only_trait_path(),
}; };
self.expected_found_str(pretty_exp_found) match self.expected_found_str(pretty_exp_found) {
Some((expected, found)) if expected == found => {
self.expected_found_str(exp_found)
}
ret => ret,
}
} }
} }
} }

View file

@ -130,8 +130,8 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
.tcx() .tcx()
.sess .sess
.struct_span_err(span, &format!("`impl` associated type signature for `{}` doesn't match `trait` associated type signature", item_name)); .struct_span_err(span, &format!("`impl` associated type signature for `{}` doesn't match `trait` associated type signature", item_name));
err.span_label(impl_sp, &format!("found")); err.span_label(impl_sp, "found");
err.span_label(trait_sp, &format!("expected")); err.span_label(trait_sp, "expected");
err.emit(); err.emit();
} }

View file

@ -68,11 +68,10 @@ pub enum EscapeError {
impl EscapeError { impl EscapeError {
/// Returns true for actual errors, as opposed to warnings. /// Returns true for actual errors, as opposed to warnings.
pub fn is_fatal(&self) -> bool { pub fn is_fatal(&self) -> bool {
match self { !matches!(
EscapeError::UnskippedWhitespaceWarning => false, self,
EscapeError::MultipleSkippedLinesWarning => false, EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning
_ => true, )
}
} }
} }
@ -330,7 +329,7 @@ where
callback(start..end, Err(EscapeError::MultipleSkippedLinesWarning)); callback(start..end, Err(EscapeError::MultipleSkippedLinesWarning));
} }
let tail = &tail[first_non_space..]; let tail = &tail[first_non_space..];
if let Some(c) = tail.chars().nth(0) { if let Some(c) = tail.chars().next() {
// For error reporting, we would like the span to contain the character that was not // For error reporting, we would like the span to contain the character that was not
// skipped. The +1 is necessary to account for the leading \ that started the escape. // skipped. The +1 is necessary to account for the leading \ that started the escape.
let end = start + first_non_space + c.len_utf8() + 1; let end = start + first_non_space + c.len_utf8() + 1;

View file

@ -0,0 +1,106 @@
use crate::{context::LintContext, LateContext, LateLintPass};
use rustc_hir as hir;
use rustc_middle::ty::{fold::TypeFoldable, Ty};
use rustc_span::{symbol::sym, Span};
declare_lint! {
    /// The `enum_intrinsics_non_enums` lint detects calls to
    /// intrinsic functions that require an enum ([`core::mem::discriminant`],
    /// [`core::mem::variant_count`]), but are called with a non-enum type.
    ///
    /// [`core::mem::discriminant`]: https://doc.rust-lang.org/core/mem/fn.discriminant.html
    /// [`core::mem::variant_count`]: https://doc.rust-lang.org/core/mem/fn.variant_count.html
    ///
    /// ### Example
    ///
    /// ```rust,compile_fail
    /// #![deny(enum_intrinsics_non_enums)]
    /// core::mem::discriminant::<i32>(&123);
    /// ```
    ///
    /// {{produces}}
    ///
    /// ### Explanation
    ///
    /// In order to accept any enum, the `mem::discriminant` and
    /// `mem::variant_count` functions are generic over a type `T`.
    /// This makes it technically possible for `T` to be a non-enum,
    /// in which case the return value is unspecified.
    ///
    /// This lint prevents such incorrect usage of these functions.
    ENUM_INTRINSICS_NON_ENUMS,
    // Deny by default: per the explanation above, the return value is
    // unspecified for non-enum types, so such a call is almost certainly a bug.
    Deny,
    "detects calls to `core::mem::discriminant` and `core::mem::variant_count` with non-enum types"
}

// Registers the lint pass struct `EnumIntrinsicsNonEnums` as the pass that
// emits the lint declared above.
declare_lint_pass!(EnumIntrinsicsNonEnums => [ENUM_INTRINSICS_NON_ENUMS]);
/// Returns `true` only when the given type is definitely not an enum.
///
/// A generic type could still be instantiated with an enum, so any type that
/// potentially needs substitution is conservatively treated as if it might be
/// an enum (i.e. this returns `false` for it).
fn is_non_enum(t: Ty<'_>) -> bool {
    // De Morgan form of `!t.is_enum() && !t.potentially_needs_subst()`.
    !(t.is_enum() || t.potentially_needs_subst())
}
/// Emits the `enum_intrinsics_non_enums` lint for a call to
/// `core::mem::discriminant` whose type parameter is definitely not an enum
/// (as decided by `is_non_enum`).
///
/// * `func_expr` — the callee expression; its recorded substitutions supply
///   the `T` of `discriminant::<T>`.
/// * `expr_span` — span of the whole call, where the lint is reported.
/// * `args_span` — span of the argument, used for the explanatory note.
fn enforce_mem_discriminant(
    cx: &LateContext<'_>,
    func_expr: &hir::Expr<'_>,
    expr_span: Span,
    args_span: Span,
) {
    // The first (and only) type substitution on the callee is the `T` the
    // intrinsic was instantiated with.
    let ty_param = cx.typeck_results().node_substs(func_expr.hir_id).type_at(0);
    if is_non_enum(ty_param) {
        cx.struct_span_lint(ENUM_INTRINSICS_NON_ENUMS, expr_span, |builder| {
            builder
                .build(
                    "the return value of `mem::discriminant` is \
                        unspecified when called with a non-enum type",
                )
                .span_note(
                    args_span,
                    &format!(
                        "the argument to `discriminant` should be a \
                            reference to an enum, but it was passed \
                            a reference to a `{}`, which is not an enum.",
                        ty_param,
                    ),
                )
                .emit();
        });
    }
}
/// Emits the `enum_intrinsics_non_enums` lint for a call to
/// `core::mem::variant_count` whose type parameter is definitely not an enum
/// (as decided by `is_non_enum`). `span` is where the lint is reported.
fn enforce_mem_variant_count(cx: &LateContext<'_>, func_expr: &hir::Expr<'_>, span: Span) {
    // The first (and only) type substitution on the callee is the `T` the
    // intrinsic was instantiated with.
    let ty_param = cx.typeck_results().node_substs(func_expr.hir_id).type_at(0);
    if is_non_enum(ty_param) {
        cx.struct_span_lint(ENUM_INTRINSICS_NON_ENUMS, span, |builder| {
            builder
                .build(
                    "the return value of `mem::variant_count` is \
                        unspecified when called with a non-enum type",
                )
                .note(&format!(
                    "the type parameter of `variant_count` should \
                        be an enum, but it was instantiated with \
                        the type `{}`, which is not an enum.",
                    ty_param,
                ))
                .emit();
        });
    }
}
impl<'tcx> LateLintPass<'tcx> for EnumIntrinsicsNonEnums {
    /// Inspects every expression; for a plain `path(...)` call whose path
    /// resolves to the `mem_discriminant` or `mem_variant_count` diagnostic
    /// item, defers to the matching enforcement helper.
    fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) {
        // Guard-style filtering instead of nested `if let`s: bail out as soon
        // as the expression cannot be one of the calls we care about.
        let (func, args) = match &expr.kind {
            hir::ExprKind::Call(func, args) => (func, args),
            _ => return,
        };
        let qpath = match &func.kind {
            hir::ExprKind::Path(qpath) => qpath,
            _ => return,
        };
        let def_id = match cx.qpath_res(qpath, func.hir_id).opt_def_id() {
            Some(def_id) => def_id,
            None => return,
        };
        if cx.tcx.is_diagnostic_item(sym::mem_discriminant, def_id) {
            enforce_mem_discriminant(cx, func, expr.span, args[0].span);
        } else if cx.tcx.is_diagnostic_item(sym::mem_variant_count, def_id) {
            enforce_mem_variant_count(cx, func, expr.span);
        }
    }
}

View file

@ -1,7 +1,6 @@
use crate::context::{CheckLintNameResult, LintStore}; use crate::context::{CheckLintNameResult, LintStore};
use crate::late::unerased_lint_store; use crate::late::unerased_lint_store;
use rustc_ast as ast; use rustc_ast as ast;
use rustc_ast::unwrap_or;
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder}; use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder};
@ -233,7 +232,10 @@ impl<'s> LintLevelsBuilder<'s> {
Some(lvl) => lvl, Some(lvl) => lvl,
}; };
let mut metas = unwrap_or!(attr.meta_item_list(), continue); let mut metas = match attr.meta_item_list() {
Some(x) => x,
None => continue,
};
if metas.is_empty() { if metas.is_empty() {
// FIXME (#55112): issue unused-attributes lint for `#[level()]` // FIXME (#55112): issue unused-attributes lint for `#[level()]`

View file

@ -47,6 +47,7 @@ mod array_into_iter;
pub mod builtin; pub mod builtin;
mod context; mod context;
mod early; mod early;
mod enum_intrinsics_non_enums;
mod internal; mod internal;
mod late; mod late;
mod levels; mod levels;
@ -76,6 +77,7 @@ use rustc_span::Span;
use array_into_iter::ArrayIntoIter; use array_into_iter::ArrayIntoIter;
use builtin::*; use builtin::*;
use enum_intrinsics_non_enums::EnumIntrinsicsNonEnums;
use internal::*; use internal::*;
use methods::*; use methods::*;
use non_ascii_idents::*; use non_ascii_idents::*;
@ -168,6 +170,7 @@ macro_rules! late_lint_passes {
TemporaryCStringAsPtr: TemporaryCStringAsPtr, TemporaryCStringAsPtr: TemporaryCStringAsPtr,
NonPanicFmt: NonPanicFmt, NonPanicFmt: NonPanicFmt,
NoopMethodCall: NoopMethodCall, NoopMethodCall: NoopMethodCall,
EnumIntrinsicsNonEnums: EnumIntrinsicsNonEnums,
InvalidAtomicOrdering: InvalidAtomicOrdering, InvalidAtomicOrdering: InvalidAtomicOrdering,
NamedAsmLabels: NamedAsmLabels, NamedAsmLabels: NamedAsmLabels,
] ]

View file

@ -230,8 +230,7 @@ fn check_panic_str<'tcx>(
Err(_) => (None, None), Err(_) => (None, None),
}; };
let mut fmt_parser = let mut fmt_parser = Parser::new(fmt, style, snippet.clone(), false, ParseMode::Format);
Parser::new(fmt.as_ref(), style, snippet.clone(), false, ParseMode::Format);
let n_arguments = (&mut fmt_parser).filter(|a| matches!(a, Piece::NextArgument(_))).count(); let n_arguments = (&mut fmt_parser).filter(|a| matches!(a, Piece::NextArgument(_))).count();
if n_arguments > 0 && fmt_parser.errors.is_empty() { if n_arguments > 0 && fmt_parser.errors.is_empty() {

View file

@ -6,6 +6,7 @@
use crate::{declare_lint, declare_lint_pass, FutureIncompatibilityReason}; use crate::{declare_lint, declare_lint_pass, FutureIncompatibilityReason};
use rustc_span::edition::Edition; use rustc_span::edition::Edition;
use rustc_span::symbol::sym;
declare_lint! { declare_lint! {
/// The `forbidden_lint_groups` lint detects violations of /// The `forbidden_lint_groups` lint detects violations of
@ -3476,6 +3477,8 @@ declare_lint! {
/// } /// }
/// ///
/// // in crate B /// // in crate B
/// #![feature(non_exhaustive_omitted_patterns_lint)]
///
/// match Bar::A { /// match Bar::A {
/// Bar::A => {}, /// Bar::A => {},
/// #[warn(non_exhaustive_omitted_patterns)] /// #[warn(non_exhaustive_omitted_patterns)]
@ -3512,6 +3515,7 @@ declare_lint! {
pub NON_EXHAUSTIVE_OMITTED_PATTERNS, pub NON_EXHAUSTIVE_OMITTED_PATTERNS,
Allow, Allow,
"detect when patterns of types marked `non_exhaustive` are missed", "detect when patterns of types marked `non_exhaustive` are missed",
@feature_gate = sym::non_exhaustive_omitted_patterns_lint;
} }
declare_lint! { declare_lint! {

View file

@ -24,8 +24,7 @@ fn parse_attributes(field: &syn::Field) -> Attributes {
} }
if meta.path().is_ident("project") { if meta.path().is_ident("project") {
if let Meta::List(list) = meta { if let Meta::List(list) = meta {
if let Some(nested) = list.nested.iter().next() { if let Some(NestedMeta::Meta(meta)) = list.nested.iter().next() {
if let NestedMeta::Meta(meta) = nested {
attrs.project = meta.path().get_ident().cloned(); attrs.project = meta.path().get_ident().cloned();
any_attr = true; any_attr = true;
} }
@ -34,7 +33,6 @@ fn parse_attributes(field: &syn::Field) -> Attributes {
} }
} }
} }
}
if !any_attr { if !any_attr {
panic!("error parsing stable_hasher"); panic!("error parsing stable_hasher");
} }

View file

@ -349,14 +349,14 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
) -> Result<proc_macro2::TokenStream, SessionDiagnosticDeriveError> { ) -> Result<proc_macro2::TokenStream, SessionDiagnosticDeriveError> {
let field_binding = &info.binding.binding; let field_binding = &info.binding.binding;
let option_ty = option_inner_ty(&info.ty); let option_ty = option_inner_ty(info.ty);
let generated_code = self.generate_non_option_field_code( let generated_code = self.generate_non_option_field_code(
attr, attr,
FieldInfo { FieldInfo {
vis: info.vis, vis: info.vis,
binding: info.binding, binding: info.binding,
ty: option_ty.unwrap_or(&info.ty), ty: option_ty.unwrap_or(info.ty),
span: info.span, span: info.span,
}, },
)?; )?;
@ -388,7 +388,7 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
let formatted_str = self.build_format(&s.value(), attr.span()); let formatted_str = self.build_format(&s.value(), attr.span());
match name { match name {
"message" => { "message" => {
if type_matches_path(&info.ty, &["rustc_span", "Span"]) { if type_matches_path(info.ty, &["rustc_span", "Span"]) {
quote! { quote! {
#diag.set_span(*#field_binding); #diag.set_span(*#field_binding);
#diag.set_primary_message(#formatted_str); #diag.set_primary_message(#formatted_str);
@ -401,7 +401,7 @@ impl<'a> SessionDiagnosticDeriveBuilder<'a> {
} }
} }
"label" => { "label" => {
if type_matches_path(&info.ty, &["rustc_span", "Span"]) { if type_matches_path(info.ty, &["rustc_span", "Span"]) {
quote! { quote! {
#diag.span_label(*#field_binding, #formatted_str); #diag.span_label(*#field_binding, #formatted_str);
} }

View file

@ -363,7 +363,7 @@ impl Collector<'tcx> {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if existing.is_empty() { if existing.is_empty() {
// Add if not found // Add if not found
let new_name = passed_lib.new_name.as_ref().map(|s| &**s); // &Option<String> -> Option<&str> let new_name: Option<&str> = passed_lib.new_name.as_deref();
let lib = NativeLib { let lib = NativeLib {
name: Some(Symbol::intern(new_name.unwrap_or(&passed_lib.name))), name: Some(Symbol::intern(new_name.unwrap_or(&passed_lib.name))),
kind: passed_lib.kind, kind: passed_lib.kind,

View file

@ -63,6 +63,7 @@ use rustc_data_structures::fingerprint::Fingerprint;
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_INDEX}; use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_INDEX};
use rustc_hir::definitions::DefPathHash; use rustc_hir::definitions::DefPathHash;
use rustc_hir::HirId; use rustc_hir::HirId;
use rustc_query_system::dep_graph::FingerprintStyle;
use rustc_span::symbol::Symbol; use rustc_span::symbol::Symbol;
use std::hash::Hash; use std::hash::Hash;
@ -89,9 +90,9 @@ pub struct DepKindStruct {
/// Whether the query key can be recovered from the hashed fingerprint. /// Whether the query key can be recovered from the hashed fingerprint.
/// See [DepNodeParams] trait for the behaviour of each key type. /// See [DepNodeParams] trait for the behaviour of each key type.
// FIXME: Make this a simple boolean once DepNodeParams::can_reconstruct_query_key // FIXME: Make this a simple boolean once DepNodeParams::fingerprint_style
// can be made a specialized associated const. // can be made a specialized associated const.
can_reconstruct_query_key: fn() -> bool, fingerprint_style: fn() -> FingerprintStyle,
} }
impl std::ops::Deref for DepKind { impl std::ops::Deref for DepKind {
@ -103,14 +104,14 @@ impl std::ops::Deref for DepKind {
impl DepKind { impl DepKind {
#[inline(always)] #[inline(always)]
pub fn can_reconstruct_query_key(&self) -> bool { pub fn fingerprint_style(&self) -> FingerprintStyle {
// Only fetch the DepKindStruct once. // Only fetch the DepKindStruct once.
let data: &DepKindStruct = &**self; let data: &DepKindStruct = &**self;
if data.is_anon { if data.is_anon {
return false; return FingerprintStyle::Opaque;
} }
(data.can_reconstruct_query_key)() (data.fingerprint_style)()
} }
} }
@ -151,6 +152,7 @@ macro_rules! contains_eval_always_attr {
pub mod dep_kind { pub mod dep_kind {
use super::*; use super::*;
use crate::ty::query::query_keys; use crate::ty::query::query_keys;
use rustc_query_system::dep_graph::FingerprintStyle;
// We use this for most things when incr. comp. is turned off. // We use this for most things when incr. comp. is turned off.
pub const Null: DepKindStruct = DepKindStruct { pub const Null: DepKindStruct = DepKindStruct {
@ -158,7 +160,7 @@ pub mod dep_kind {
is_anon: false, is_anon: false,
is_eval_always: false, is_eval_always: false,
can_reconstruct_query_key: || true, fingerprint_style: || FingerprintStyle::Unit,
}; };
pub const TraitSelect: DepKindStruct = DepKindStruct { pub const TraitSelect: DepKindStruct = DepKindStruct {
@ -166,7 +168,7 @@ pub mod dep_kind {
is_anon: true, is_anon: true,
is_eval_always: false, is_eval_always: false,
can_reconstruct_query_key: || true, fingerprint_style: || FingerprintStyle::Unit,
}; };
pub const CompileCodegenUnit: DepKindStruct = DepKindStruct { pub const CompileCodegenUnit: DepKindStruct = DepKindStruct {
@ -174,7 +176,7 @@ pub mod dep_kind {
is_anon: false, is_anon: false,
is_eval_always: false, is_eval_always: false,
can_reconstruct_query_key: || false, fingerprint_style: || FingerprintStyle::Opaque,
}; };
pub const CompileMonoItem: DepKindStruct = DepKindStruct { pub const CompileMonoItem: DepKindStruct = DepKindStruct {
@ -182,7 +184,7 @@ pub mod dep_kind {
is_anon: false, is_anon: false,
is_eval_always: false, is_eval_always: false,
can_reconstruct_query_key: || false, fingerprint_style: || FingerprintStyle::Opaque,
}; };
macro_rules! define_query_dep_kinds { macro_rules! define_query_dep_kinds {
@ -196,16 +198,16 @@ pub mod dep_kind {
const is_eval_always: bool = contains_eval_always_attr!($($attrs)*); const is_eval_always: bool = contains_eval_always_attr!($($attrs)*);
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> rustc_query_system::dep_graph::FingerprintStyle {
<query_keys::$variant<'_> as DepNodeParams<TyCtxt<'_>>> <query_keys::$variant<'_> as DepNodeParams<TyCtxt<'_>>>
::can_reconstruct_query_key() ::fingerprint_style()
} }
DepKindStruct { DepKindStruct {
has_params, has_params,
is_anon, is_anon,
is_eval_always, is_eval_always,
can_reconstruct_query_key, fingerprint_style,
} }
};)* };)*
); );
@ -320,7 +322,7 @@ impl DepNodeExt for DepNode {
/// method will assert that the given DepKind actually requires a /// method will assert that the given DepKind actually requires a
/// single DefId/DefPathHash parameter. /// single DefId/DefPathHash parameter.
fn from_def_path_hash(def_path_hash: DefPathHash, kind: DepKind) -> DepNode { fn from_def_path_hash(def_path_hash: DefPathHash, kind: DepKind) -> DepNode {
debug_assert!(kind.can_reconstruct_query_key() && kind.has_params); debug_assert!(kind.fingerprint_style() == FingerprintStyle::DefPathHash);
DepNode { kind, hash: def_path_hash.0.into() } DepNode { kind, hash: def_path_hash.0.into() }
} }
@ -335,7 +337,7 @@ impl DepNodeExt for DepNode {
/// refers to something from the previous compilation session that /// refers to something from the previous compilation session that
/// has been removed. /// has been removed.
fn extract_def_id(&self, tcx: TyCtxt<'tcx>) -> Option<DefId> { fn extract_def_id(&self, tcx: TyCtxt<'tcx>) -> Option<DefId> {
if self.kind.can_reconstruct_query_key() { if self.kind.fingerprint_style() == FingerprintStyle::DefPathHash {
Some( Some(
tcx.on_disk_cache tcx.on_disk_cache
.as_ref()? .as_ref()?
@ -350,14 +352,16 @@ impl DepNodeExt for DepNode {
fn from_label_string(label: &str, def_path_hash: DefPathHash) -> Result<DepNode, ()> { fn from_label_string(label: &str, def_path_hash: DefPathHash) -> Result<DepNode, ()> {
let kind = dep_kind_from_label_string(label)?; let kind = dep_kind_from_label_string(label)?;
if !kind.can_reconstruct_query_key() { match kind.fingerprint_style() {
return Err(()); FingerprintStyle::Opaque => Err(()),
} FingerprintStyle::Unit => {
if !kind.has_params {
if kind.has_params {
Ok(DepNode::from_def_path_hash(def_path_hash, kind))
} else {
Ok(DepNode::new_no_params(kind)) Ok(DepNode::new_no_params(kind))
} else {
Err(())
}
}
FingerprintStyle::DefPathHash => Ok(DepNode::from_def_path_hash(def_path_hash, kind)),
} }
} }
@ -369,8 +373,8 @@ impl DepNodeExt for DepNode {
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for () { impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for () {
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> FingerprintStyle {
true FingerprintStyle::Unit
} }
fn to_fingerprint(&self, _: TyCtxt<'tcx>) -> Fingerprint { fn to_fingerprint(&self, _: TyCtxt<'tcx>) -> Fingerprint {
@ -384,8 +388,8 @@ impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for () {
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for DefId { impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for DefId {
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> FingerprintStyle {
true FingerprintStyle::DefPathHash
} }
fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint { fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint {
@ -403,8 +407,8 @@ impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for DefId {
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for LocalDefId { impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for LocalDefId {
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> FingerprintStyle {
true FingerprintStyle::DefPathHash
} }
fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint { fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint {
@ -422,8 +426,8 @@ impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for LocalDefId {
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for CrateNum { impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for CrateNum {
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> FingerprintStyle {
true FingerprintStyle::DefPathHash
} }
fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint { fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint {
@ -442,8 +446,8 @@ impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for CrateNum {
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for (DefId, DefId) { impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for (DefId, DefId) {
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> FingerprintStyle {
false FingerprintStyle::Opaque
} }
// We actually would not need to specialize the implementation of this // We actually would not need to specialize the implementation of this
@ -467,8 +471,8 @@ impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for (DefId, DefId) {
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for HirId { impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for HirId {
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> FingerprintStyle {
false FingerprintStyle::Opaque
} }
// We actually would not need to specialize the implementation of this // We actually would not need to specialize the implementation of this

View file

@ -25,8 +25,8 @@ impl rustc_query_system::dep_graph::DepKind for DepKind {
const NULL: Self = DepKind::Null; const NULL: Self = DepKind::Null;
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key(&self) -> bool { fn fingerprint_style(&self) -> rustc_query_system::dep_graph::FingerprintStyle {
DepKind::can_reconstruct_query_key(self) DepKind::fingerprint_style(self)
} }
#[inline(always)] #[inline(always)]

View file

@ -264,14 +264,14 @@ impl EvaluationResult {
/// Indicates that trait evaluation caused overflow and in which pass. /// Indicates that trait evaluation caused overflow and in which pass.
#[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable)] #[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable)]
pub enum OverflowError { pub enum OverflowError {
Cannonical, Canonical,
ErrorReporting, ErrorReporting,
} }
impl<'tcx> From<OverflowError> for SelectionError<'tcx> { impl<'tcx> From<OverflowError> for SelectionError<'tcx> {
fn from(overflow_error: OverflowError) -> SelectionError<'tcx> { fn from(overflow_error: OverflowError) -> SelectionError<'tcx> {
match overflow_error { match overflow_error {
OverflowError::Cannonical => SelectionError::Overflow, OverflowError::Canonical => SelectionError::Overflow,
OverflowError::ErrorReporting => SelectionError::ErrorReporting, OverflowError::ErrorReporting => SelectionError::ErrorReporting,
} }
} }

View file

@ -986,7 +986,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
let niche = if def.repr.hide_niche() { let niche = if def.repr.hide_niche() {
None None
} else { } else {
Niche::from_scalar(dl, Size::ZERO, scalar.clone()) Niche::from_scalar(dl, Size::ZERO, *scalar)
}; };
if let Some(niche) = niche { if let Some(niche) = niche {
match st.largest_niche { match st.largest_niche {
@ -2273,7 +2273,7 @@ where
) -> TyMaybeWithLayout<'tcx> { ) -> TyMaybeWithLayout<'tcx> {
let tcx = cx.tcx(); let tcx = cx.tcx();
let tag_layout = |tag: Scalar| -> TyAndLayout<'tcx> { let tag_layout = |tag: Scalar| -> TyAndLayout<'tcx> {
let layout = Layout::scalar(cx, tag.clone()); let layout = Layout::scalar(cx, tag);
TyAndLayout { layout: tcx.intern_layout(layout), ty: tag.value.to_ty(tcx) } TyAndLayout { layout: tcx.intern_layout(layout), ty: tag.value.to_ty(tcx) }
}; };
@ -3012,7 +3012,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
}; };
let target = &self.tcx.sess.target; let target = &self.tcx.sess.target;
let target_env_gnu_like = matches!(&target.env[..], "gnu" | "musl"); let target_env_gnu_like = matches!(&target.env[..], "gnu" | "musl" | "uclibc");
let win_x64_gnu = target.os == "windows" && target.arch == "x86_64" && target.env == "gnu"; let win_x64_gnu = target.os == "windows" && target.arch == "x86_64" && target.env == "gnu";
let linux_s390x_gnu_like = let linux_s390x_gnu_like =
target.os == "linux" && target.arch == "s390x" && target_env_gnu_like; target.os == "linux" && target.arch == "s390x" && target_env_gnu_like;
@ -3110,7 +3110,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
if arg.layout.is_zst() { if arg.layout.is_zst() {
// For some forsaken reason, x86_64-pc-windows-gnu // For some forsaken reason, x86_64-pc-windows-gnu
// doesn't ignore zero-sized struct arguments. // doesn't ignore zero-sized struct arguments.
// The same is true for {s390x,sparc64,powerpc}-unknown-linux-{gnu,musl}. // The same is true for {s390x,sparc64,powerpc}-unknown-linux-{gnu,musl,uclibc}.
if is_return if is_return
|| rust_abi || rust_abi
|| (!win_x64_gnu || (!win_x64_gnu

View file

@ -130,7 +130,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
TerminatorKind::Call { TerminatorKind::Call {
func: exchange_malloc, func: exchange_malloc,
args: vec![Operand::Move(size), Operand::Move(align)], args: vec![Operand::Move(size), Operand::Move(align)],
destination: Some((Place::from(storage), success)), destination: Some((storage, success)),
cleanup: None, cleanup: None,
from_hir_call: false, from_hir_call: false,
fn_span: expr_span, fn_span: expr_span,
@ -153,7 +153,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} }
// Transmute `*mut u8` to the box (thus far, uninitialized): // Transmute `*mut u8` to the box (thus far, uninitialized):
let box_ = Rvalue::ShallowInitBox(Operand::Move(Place::from(storage)), value.ty); let box_ = Rvalue::ShallowInitBox(Operand::Move(storage), value.ty);
this.cfg.push_assign(block, source_info, Place::from(result), box_); this.cfg.push_assign(block, source_info, Place::from(result), box_);
// initialize the box contents: // initialize the box contents:

View file

@ -1068,9 +1068,7 @@ impl<'tcx> SplitWildcard<'tcx> {
Missing { Missing {
nonexhaustive_enum_missing_real_variants: self nonexhaustive_enum_missing_real_variants: self
.iter_missing(pcx) .iter_missing(pcx)
.filter(|c| !c.is_non_exhaustive()) .any(|c| !c.is_non_exhaustive()),
.next()
.is_some(),
} }
} else { } else {
Missing { nonexhaustive_enum_missing_real_variants: false } Missing { nonexhaustive_enum_missing_real_variants: false }

View file

@ -289,7 +289,7 @@ impl<'tcx> RustcPeekAt<'tcx> for MaybeMutBorrowedLocals<'_, 'tcx> {
flow_state: &BitSet<Local>, flow_state: &BitSet<Local>,
call: PeekCall, call: PeekCall,
) { ) {
warn!("peek_at: place={:?}", place); info!(?place, "peek_at");
let local = if let Some(l) = place.as_local() { let local = if let Some(l) = place.as_local() {
l l
} else { } else {
@ -311,7 +311,7 @@ impl<'tcx> RustcPeekAt<'tcx> for MaybeLiveLocals {
flow_state: &BitSet<Local>, flow_state: &BitSet<Local>,
call: PeekCall, call: PeekCall,
) { ) {
warn!("peek_at: place={:?}", place); info!(?place, "peek_at");
let local = if let Some(l) = place.as_local() { let local = if let Some(l) = place.as_local() {
l l
} else { } else {

View file

@ -263,7 +263,7 @@ impl<'a, 'tcx> Helper<'a, 'tcx> {
} }
// check that the value being matched on is the same. The // check that the value being matched on is the same. The
if this_bb_discr_info.targets_with_values.iter().find(|x| x.0 == value).is_none() { if !this_bb_discr_info.targets_with_values.iter().any(|x| x.0 == value) {
trace!("NO: values being matched on are not the same"); trace!("NO: values being matched on are not the same");
return None; return None;
} }

View file

@ -111,8 +111,7 @@ impl<'a, 'tcx> Patcher<'a, 'tcx> {
Operand::Copy(place) | Operand::Move(place) => { Operand::Copy(place) | Operand::Move(place) => {
// create new local // create new local
let ty = operand.ty(self.local_decls, self.tcx); let ty = operand.ty(self.local_decls, self.tcx);
let local_decl = let local_decl = LocalDecl::with_source_info(ty, statement.source_info);
LocalDecl::with_source_info(ty, statement.source_info.clone());
let local = self.local_decls.push(local_decl); let local = self.local_decls.push(local_decl);
// make it live // make it live
let mut make_live_statement = statement.clone(); let mut make_live_statement = statement.clone();

View file

@ -1096,7 +1096,7 @@ impl<'a> Parser<'a> {
(Err(ref mut err), Some((mut snapshot, ExprKind::Path(None, path)))) => { (Err(ref mut err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
let name = pprust::path_to_string(&path); let name = pprust::path_to_string(&path);
snapshot.bump(); // `(` snapshot.bump(); // `(`
match snapshot.parse_struct_fields(path.clone(), false, token::Paren) { match snapshot.parse_struct_fields(path, false, token::Paren) {
Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => { Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => {
// We have are certain we have `Enum::Foo(a: 3, b: 4)`, suggest // We have are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.

View file

@ -1767,8 +1767,7 @@ impl CheckAttrVisitor<'tcx> {
fn check_macro_export(&self, hir_id: HirId, attr: &Attribute, target: Target) { fn check_macro_export(&self, hir_id: HirId, attr: &Attribute, target: Target) {
if target != Target::MacroDef { if target != Target::MacroDef {
self.tcx.struct_span_lint_hir(UNUSED_ATTRIBUTES, hir_id, attr.span, |lint| { self.tcx.struct_span_lint_hir(UNUSED_ATTRIBUTES, hir_id, attr.span, |lint| {
lint.build(&format!("`#[macro_export]` only has an effect on macro definitions")) lint.build("`#[macro_export]` only has an effect on macro definitions").emit();
.emit();
}); });
} }
} }

View file

@ -8,7 +8,6 @@
//! through, but errors for structured control flow in a `const` should be emitted here. //! through, but errors for structured control flow in a `const` should be emitted here.
use rustc_attr as attr; use rustc_attr as attr;
use rustc_data_structures::stable_set::FxHashSet;
use rustc_errors::struct_span_err; use rustc_errors::struct_span_err;
use rustc_hir as hir; use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId; use rustc_hir::def_id::LocalDefId;
@ -83,30 +82,39 @@ impl<'tcx> hir::itemlikevisit::ItemLikeVisitor<'tcx> for CheckConstTraitVisitor<
let _: Option<_> = try { let _: Option<_> = try {
if let hir::ItemKind::Impl(ref imp) = item.kind { if let hir::ItemKind::Impl(ref imp) = item.kind {
if let hir::Constness::Const = imp.constness { if let hir::Constness::Const = imp.constness {
let did = imp.of_trait.as_ref()?.trait_def_id()?; let trait_def_id = imp.of_trait.as_ref()?.trait_def_id()?;
let mut to_implement = FxHashSet::default(); let ancestors = self
.tcx
.trait_def(trait_def_id)
.ancestors(self.tcx, item.def_id.to_def_id())
.ok()?;
let mut to_implement = Vec::new();
for did in self.tcx.associated_item_def_ids(did) { for trait_item in self.tcx.associated_items(trait_def_id).in_definition_order()
{
if let ty::AssocItem { if let ty::AssocItem {
kind: ty::AssocKind::Fn, ident, defaultness, .. kind: ty::AssocKind::Fn, ident, defaultness, ..
} = self.tcx.associated_item(*did) } = trait_item
{ {
// we can ignore functions that do not have default bodies: // we can ignore functions that do not have default bodies:
// if those are unimplemented it will be catched by typeck. // if those are unimplemented it will be catched by typeck.
if defaultness.has_value() if !defaultness.has_value()
&& !self.tcx.has_attr(*did, sym::default_method_body_is_const) || self
.tcx
.has_attr(trait_item.def_id, sym::default_method_body_is_const)
{ {
to_implement.insert(ident); continue;
}
}
} }
for it in imp let is_implemented = ancestors
.items .leaf_def(self.tcx, trait_item.ident, trait_item.kind)
.iter() .map(|node_item| !node_item.defining_node.is_from_trait())
.filter(|it| matches!(it.kind, hir::AssocItemKind::Fn { .. })) .unwrap_or(false);
{
to_implement.remove(&it.ident); if !is_implemented {
to_implement.push(ident.to_string());
}
}
} }
// all nonconst trait functions (not marked with #[default_method_body_is_const]) // all nonconst trait functions (not marked with #[default_method_body_is_const])
@ -118,7 +126,7 @@ impl<'tcx> hir::itemlikevisit::ItemLikeVisitor<'tcx> for CheckConstTraitVisitor<
item.span, item.span,
"const trait implementations may not use non-const default functions", "const trait implementations may not use non-const default functions",
) )
.note(&format!("`{}` not implemented", to_implement.into_iter().map(|id| id.to_string()).collect::<Vec<_>>().join("`, `"))) .note(&format!("`{}` not implemented", to_implement.join("`, `")))
.emit(); .emit();
} }
} }

View file

@ -141,6 +141,7 @@ impl ExprVisitor<'tcx> {
template: &[InlineAsmTemplatePiece], template: &[InlineAsmTemplatePiece],
is_input: bool, is_input: bool,
tied_input: Option<(&hir::Expr<'tcx>, Option<InlineAsmType>)>, tied_input: Option<(&hir::Expr<'tcx>, Option<InlineAsmType>)>,
target_features: &[Symbol],
) -> Option<InlineAsmType> { ) -> Option<InlineAsmType> {
// Check the type against the allowed types for inline asm. // Check the type against the allowed types for inline asm.
let ty = self.typeck_results.expr_ty_adjusted(expr); let ty = self.typeck_results.expr_ty_adjusted(expr);
@ -283,17 +284,20 @@ impl ExprVisitor<'tcx> {
}; };
// Check whether the selected type requires a target feature. Note that // Check whether the selected type requires a target feature. Note that
// this is different from the feature check we did earlier in AST // this is different from the feature check we did earlier. While the
// lowering. While AST lowering checked that this register class is // previous check checked that this register class is usable at all
// usable at all with the currently enabled features, some types may // with the currently enabled features, some types may only be usable
// only be usable with a register class when a certain feature is // with a register class when a certain feature is enabled. We check
// enabled. We check this here since it depends on the results of typeck. // this here since it depends on the results of typeck.
// //
// Also note that this check isn't run when the operand type is never // Also note that this check isn't run when the operand type is never
// (!). In that case we still need the earlier check in AST lowering to // (!). In that case we still need the earlier check to verify that the
// verify that the register class is usable at all. // register class is usable at all.
if let Some(feature) = feature { if let Some(feature) = feature {
if !self.tcx.sess.target_features.contains(&Symbol::intern(feature)) { let feat_sym = Symbol::intern(feature);
if !self.tcx.sess.target_features.contains(&feat_sym)
&& !target_features.contains(&feat_sym)
{
let msg = &format!("`{}` target feature is not enabled", feature); let msg = &format!("`{}` target feature is not enabled", feature);
let mut err = self.tcx.sess.struct_span_err(expr.span, msg); let mut err = self.tcx.sess.struct_span_err(expr.span, msg);
err.note(&format!( err.note(&format!(
@ -349,23 +353,122 @@ impl ExprVisitor<'tcx> {
Some(asm_ty) Some(asm_ty)
} }
fn check_asm(&self, asm: &hir::InlineAsm<'tcx>) { fn check_asm(&self, asm: &hir::InlineAsm<'tcx>, hir_id: hir::HirId) {
for (idx, (op, _)) in asm.operands.iter().enumerate() { let hir = self.tcx.hir();
let enclosing_id = hir.enclosing_body_owner(hir_id);
let enclosing_def_id = hir.local_def_id(enclosing_id).to_def_id();
let attrs = self.tcx.codegen_fn_attrs(enclosing_def_id);
for (idx, (op, op_sp)) in asm.operands.iter().enumerate() {
// Validate register classes against currently enabled target
// features. We check that at least one type is available for
// the enabled features.
//
// We ignore target feature requirements for clobbers: if the
// feature is disabled then the compiler doesn't care what we
// do with the registers.
//
// Note that this is only possible for explicit register
// operands, which cannot be used in the asm string.
if let Some(reg) = op.reg() {
if !op.is_clobber() {
let mut missing_required_features = vec![];
let reg_class = reg.reg_class();
for &(_, feature) in reg_class.supported_types(self.tcx.sess.asm_arch.unwrap())
{
match feature {
Some(feature) => {
let feat_sym = Symbol::intern(feature);
if self.tcx.sess.target_features.contains(&feat_sym)
|| attrs.target_features.contains(&feat_sym)
{
missing_required_features.clear();
break;
} else {
missing_required_features.push(feature);
}
}
None => {
missing_required_features.clear();
break;
}
}
}
// We are sorting primitive strs here and can use unstable sort here
missing_required_features.sort_unstable();
missing_required_features.dedup();
match &missing_required_features[..] {
[] => {}
[feature] => {
let msg = format!(
"register class `{}` requires the `{}` target feature",
reg_class.name(),
feature
);
self.tcx.sess.struct_span_err(*op_sp, &msg).emit();
// register isn't enabled, don't do more checks
continue;
}
features => {
let msg = format!(
"register class `{}` requires at least one of the following target features: {}",
reg_class.name(),
features.join(", ")
);
self.tcx.sess.struct_span_err(*op_sp, &msg).emit();
// register isn't enabled, don't do more checks
continue;
}
}
}
}
match *op { match *op {
hir::InlineAsmOperand::In { reg, ref expr } => { hir::InlineAsmOperand::In { reg, ref expr } => {
self.check_asm_operand_type(idx, reg, expr, asm.template, true, None); self.check_asm_operand_type(
idx,
reg,
expr,
asm.template,
true,
None,
&attrs.target_features,
);
} }
hir::InlineAsmOperand::Out { reg, late: _, ref expr } => { hir::InlineAsmOperand::Out { reg, late: _, ref expr } => {
if let Some(expr) = expr { if let Some(expr) = expr {
self.check_asm_operand_type(idx, reg, expr, asm.template, false, None); self.check_asm_operand_type(
idx,
reg,
expr,
asm.template,
false,
None,
&attrs.target_features,
);
} }
} }
hir::InlineAsmOperand::InOut { reg, late: _, ref expr } => { hir::InlineAsmOperand::InOut { reg, late: _, ref expr } => {
self.check_asm_operand_type(idx, reg, expr, asm.template, false, None); self.check_asm_operand_type(
idx,
reg,
expr,
asm.template,
false,
None,
&attrs.target_features,
);
} }
hir::InlineAsmOperand::SplitInOut { reg, late: _, ref in_expr, ref out_expr } => { hir::InlineAsmOperand::SplitInOut { reg, late: _, ref in_expr, ref out_expr } => {
let in_ty = let in_ty = self.check_asm_operand_type(
self.check_asm_operand_type(idx, reg, in_expr, asm.template, true, None); idx,
reg,
in_expr,
asm.template,
true,
None,
&attrs.target_features,
);
if let Some(out_expr) = out_expr { if let Some(out_expr) = out_expr {
self.check_asm_operand_type( self.check_asm_operand_type(
idx, idx,
@ -374,6 +477,7 @@ impl ExprVisitor<'tcx> {
asm.template, asm.template,
false, false,
Some((in_expr, in_ty)), Some((in_expr, in_ty)),
&attrs.target_features,
); );
} }
} }
@ -422,7 +526,7 @@ impl Visitor<'tcx> for ExprVisitor<'tcx> {
} }
} }
hir::ExprKind::InlineAsm(asm) => self.check_asm(asm), hir::ExprKind::InlineAsm(asm) => self.check_asm(asm, expr.hir_id),
_ => {} _ => {}
} }

View file

@ -428,6 +428,7 @@ macro_rules! define_queries {
use rustc_middle::ty::query::query_keys; use rustc_middle::ty::query::query_keys;
use rustc_query_system::dep_graph::DepNodeParams; use rustc_query_system::dep_graph::DepNodeParams;
use rustc_query_system::query::{force_query, QueryDescription}; use rustc_query_system::query::{force_query, QueryDescription};
use rustc_query_system::dep_graph::FingerprintStyle;
// We use this for most things when incr. comp. is turned off. // We use this for most things when incr. comp. is turned off.
pub const Null: QueryStruct = QueryStruct { pub const Null: QueryStruct = QueryStruct {
@ -454,9 +455,9 @@ macro_rules! define_queries {
const is_anon: bool = is_anon!([$($modifiers)*]); const is_anon: bool = is_anon!([$($modifiers)*]);
#[inline(always)] #[inline(always)]
fn can_reconstruct_query_key() -> bool { fn fingerprint_style() -> FingerprintStyle {
<query_keys::$name<'_> as DepNodeParams<TyCtxt<'_>>> <query_keys::$name<'_> as DepNodeParams<TyCtxt<'_>>>
::can_reconstruct_query_key() ::fingerprint_style()
} }
fn recover<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<query_keys::$name<'tcx>> { fn recover<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<query_keys::$name<'tcx>> {
@ -472,7 +473,7 @@ macro_rules! define_queries {
return return
} }
if !can_reconstruct_query_key() { if !fingerprint_style().reconstructible() {
return return
} }

View file

@ -5,8 +5,6 @@ use rustc_query_system::query::{QueryCache, QueryCacheStore};
use std::any::type_name; use std::any::type_name;
use std::mem; use std::mem;
#[cfg(debug_assertions)]
use std::sync::atomic::Ordering;
trait KeyStats { trait KeyStats {
fn key_stats(&self, stats: &mut QueryStats); fn key_stats(&self, stats: &mut QueryStats);
@ -27,7 +25,6 @@ impl KeyStats for DefId {
#[derive(Clone)] #[derive(Clone)]
struct QueryStats { struct QueryStats {
name: &'static str, name: &'static str,
cache_hits: usize,
key_size: usize, key_size: usize,
key_type: &'static str, key_type: &'static str,
value_size: usize, value_size: usize,
@ -42,10 +39,6 @@ where
{ {
let mut stats = QueryStats { let mut stats = QueryStats {
name, name,
#[cfg(debug_assertions)]
cache_hits: map.cache_hits.load(Ordering::Relaxed),
#[cfg(not(debug_assertions))]
cache_hits: 0,
key_size: mem::size_of::<C::Key>(), key_size: mem::size_of::<C::Key>(),
key_type: type_name::<C::Key>(), key_type: type_name::<C::Key>(),
value_size: mem::size_of::<C::Value>(), value_size: mem::size_of::<C::Value>(),
@ -63,12 +56,6 @@ where
pub fn print_stats(tcx: TyCtxt<'_>) { pub fn print_stats(tcx: TyCtxt<'_>) {
let queries = query_stats(tcx); let queries = query_stats(tcx);
if cfg!(debug_assertions) {
let hits: usize = queries.iter().map(|s| s.cache_hits).sum();
let results: usize = queries.iter().map(|s| s.entry_count).sum();
eprintln!("\nQuery cache hit rate: {}", hits as f64 / (hits + results) as f64);
}
let mut query_key_sizes = queries.clone(); let mut query_key_sizes = queries.clone();
query_key_sizes.sort_by_key(|q| q.key_size); query_key_sizes.sort_by_key(|q| q.key_size);
eprintln!("\nLarge query keys:"); eprintln!("\nLarge query keys:");
@ -83,20 +70,6 @@ pub fn print_stats(tcx: TyCtxt<'_>) {
eprintln!(" {} - {} x {} - {}", q.name, q.value_size, q.entry_count, q.value_type); eprintln!(" {} - {} x {} - {}", q.name, q.value_size, q.entry_count, q.value_type);
} }
if cfg!(debug_assertions) {
let mut query_cache_hits = queries.clone();
query_cache_hits.sort_by_key(|q| q.cache_hits);
eprintln!("\nQuery cache hits:");
for q in query_cache_hits.iter().rev() {
eprintln!(
" {} - {} ({}%)",
q.name,
q.cache_hits,
q.cache_hits as f64 / (q.cache_hits + q.entry_count) as f64
);
}
}
let mut query_value_count = queries.clone(); let mut query_value_count = queries.clone();
query_value_count.sort_by_key(|q| q.entry_count); query_value_count.sort_by_key(|q| q.entry_count);
eprintln!("\nQuery value count:"); eprintln!("\nQuery value count:");

View file

@ -42,7 +42,7 @@
//! `DefId` it was computed from. In other cases, too much information gets //! `DefId` it was computed from. In other cases, too much information gets
//! lost during fingerprint computation. //! lost during fingerprint computation.
use super::{DepContext, DepKind}; use super::{DepContext, DepKind, FingerprintStyle};
use crate::ich::StableHashingContext; use crate::ich::StableHashingContext;
use rustc_data_structures::fingerprint::{Fingerprint, PackedFingerprint}; use rustc_data_structures::fingerprint::{Fingerprint, PackedFingerprint};
@ -75,7 +75,7 @@ impl<K: DepKind> DepNode<K> {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
{ {
if !kind.can_reconstruct_query_key() if !kind.fingerprint_style().reconstructible()
&& (tcx.sess().opts.debugging_opts.incremental_info && (tcx.sess().opts.debugging_opts.incremental_info
|| tcx.sess().opts.debugging_opts.query_dep_graph) || tcx.sess().opts.debugging_opts.query_dep_graph)
{ {
@ -94,7 +94,7 @@ impl<K: DepKind> fmt::Debug for DepNode<K> {
} }
pub trait DepNodeParams<Ctxt: DepContext>: fmt::Debug + Sized { pub trait DepNodeParams<Ctxt: DepContext>: fmt::Debug + Sized {
fn can_reconstruct_query_key() -> bool; fn fingerprint_style() -> FingerprintStyle;
/// This method turns the parameters of a DepNodeConstructor into an opaque /// This method turns the parameters of a DepNodeConstructor into an opaque
/// Fingerprint to be used in DepNode. /// Fingerprint to be used in DepNode.
@ -111,7 +111,7 @@ pub trait DepNodeParams<Ctxt: DepContext>: fmt::Debug + Sized {
/// This method tries to recover the query key from the given `DepNode`, /// This method tries to recover the query key from the given `DepNode`,
/// something which is needed when forcing `DepNode`s during red-green /// something which is needed when forcing `DepNode`s during red-green
/// evaluation. The query system will only call this method if /// evaluation. The query system will only call this method if
/// `can_reconstruct_query_key()` is `true`. /// `fingerprint_style()` is not `FingerprintStyle::Opaque`.
/// It is always valid to return `None` here, in which case incremental /// It is always valid to return `None` here, in which case incremental
/// compilation will treat the query as having changed instead of forcing it. /// compilation will treat the query as having changed instead of forcing it.
fn recover(tcx: Ctxt, dep_node: &DepNode<Ctxt::DepKind>) -> Option<Self>; fn recover(tcx: Ctxt, dep_node: &DepNode<Ctxt::DepKind>) -> Option<Self>;
@ -122,8 +122,8 @@ where
T: for<'a> HashStable<StableHashingContext<'a>> + fmt::Debug, T: for<'a> HashStable<StableHashingContext<'a>> + fmt::Debug,
{ {
#[inline] #[inline]
default fn can_reconstruct_query_key() -> bool { default fn fingerprint_style() -> FingerprintStyle {
false FingerprintStyle::Opaque
} }
default fn to_fingerprint(&self, tcx: Ctxt) -> Fingerprint { default fn to_fingerprint(&self, tcx: Ctxt) -> Fingerprint {

View file

@ -50,6 +50,27 @@ impl<T: DepContext> HasDepContext for T {
} }
} }
/// Describes the contents of the fingerprint generated by a given query.
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum FingerprintStyle {
/// The fingerprint is actually a DefPathHash.
DefPathHash,
/// Query key was `()` or equivalent, so fingerprint is just zero.
Unit,
/// Some opaque hash.
Opaque,
}
impl FingerprintStyle {
#[inline]
pub fn reconstructible(self) -> bool {
match self {
FingerprintStyle::DefPathHash | FingerprintStyle::Unit => true,
FingerprintStyle::Opaque => false,
}
}
}
/// Describe the different families of dependency nodes. /// Describe the different families of dependency nodes.
pub trait DepKind: Copy + fmt::Debug + Eq + Hash + Send + Encodable<FileEncoder> + 'static { pub trait DepKind: Copy + fmt::Debug + Eq + Hash + Send + Encodable<FileEncoder> + 'static {
const NULL: Self; const NULL: Self;
@ -73,5 +94,5 @@ pub trait DepKind: Copy + fmt::Debug + Eq + Hash + Send + Encodable<FileEncoder>
where where
OP: for<'a> FnOnce(Option<&'a Lock<TaskDeps<Self>>>); OP: for<'a> FnOnce(Option<&'a Lock<TaskDeps<Self>>>);
fn can_reconstruct_query_key(&self) -> bool; fn fingerprint_style(&self) -> FingerprintStyle;
} }

View file

@ -26,24 +26,15 @@ use std::hash::{Hash, Hasher};
use std::mem; use std::mem;
use std::num::NonZeroU32; use std::num::NonZeroU32;
use std::ptr; use std::ptr;
#[cfg(debug_assertions)]
use std::sync::atomic::{AtomicUsize, Ordering};
pub struct QueryCacheStore<C: QueryCache> { pub struct QueryCacheStore<C: QueryCache> {
cache: C, cache: C,
shards: Sharded<C::Sharded>, shards: Sharded<C::Sharded>,
#[cfg(debug_assertions)]
pub cache_hits: AtomicUsize,
} }
impl<C: QueryCache + Default> Default for QueryCacheStore<C> { impl<C: QueryCache + Default> Default for QueryCacheStore<C> {
fn default() -> Self { fn default() -> Self {
Self { Self { cache: C::default(), shards: Default::default() }
cache: C::default(),
shards: Default::default(),
#[cfg(debug_assertions)]
cache_hits: AtomicUsize::new(0),
}
} }
} }
@ -377,10 +368,6 @@ where
if unlikely!(tcx.profiler().enabled()) { if unlikely!(tcx.profiler().enabled()) {
tcx.profiler().query_cache_hit(index.into()); tcx.profiler().query_cache_hit(index.into());
} }
#[cfg(debug_assertions)]
{
cache.cache_hits.fetch_add(1, Ordering::Relaxed);
}
tcx.dep_graph().read_index(index); tcx.dep_graph().read_index(index);
on_hit(value) on_hit(value)
}) })
@ -429,10 +416,6 @@ where
if unlikely!(tcx.dep_context().profiler().enabled()) { if unlikely!(tcx.dep_context().profiler().enabled()) {
tcx.dep_context().profiler().query_cache_hit(index.into()); tcx.dep_context().profiler().query_cache_hit(index.into());
} }
#[cfg(debug_assertions)]
{
cache.cache_hits.fetch_add(1, Ordering::Relaxed);
}
query_blocked_prof_timer.finish_with_query_invocation_id(index.into()); query_blocked_prof_timer.finish_with_query_invocation_id(index.into());
(v, Some(index)) (v, Some(index))
@ -540,7 +523,7 @@ where
// We always expect to find a cached result for things that // We always expect to find a cached result for things that
// can be forced from `DepNode`. // can be forced from `DepNode`.
debug_assert!( debug_assert!(
!dep_node.kind.can_reconstruct_query_key() || result.is_some(), !dep_node.kind.fingerprint_style().reconstructible() || result.is_some(),
"missing on-disk cache entry for {:?}", "missing on-disk cache entry for {:?}",
dep_node dep_node
); );
@ -705,10 +688,6 @@ where
if unlikely!(tcx.dep_context().profiler().enabled()) { if unlikely!(tcx.dep_context().profiler().enabled()) {
tcx.dep_context().profiler().query_cache_hit(index.into()); tcx.dep_context().profiler().query_cache_hit(index.into());
} }
#[cfg(debug_assertions)]
{
cache.cache_hits.fetch_add(1, Ordering::Relaxed);
}
}); });
let lookup = match cached { let lookup = match cached {
@ -778,7 +757,7 @@ where
return false; return false;
} }
if !<Q::Key as DepNodeParams<CTX::DepContext>>::can_reconstruct_query_key() { if !<Q::Key as DepNodeParams<CTX::DepContext>>::fingerprint_style().reconstructible() {
return false; return false;
} }

View file

@ -9,7 +9,6 @@ use crate::{BindingKey, ModuleKind, ResolutionError, Resolver, Segment};
use crate::{CrateLint, Module, ModuleOrUniformRoot, ParentScope, PerNS, ScopeSet, Weak}; use crate::{CrateLint, Module, ModuleOrUniformRoot, ParentScope, PerNS, ScopeSet, Weak};
use crate::{NameBinding, NameBindingKind, PathResult, PrivacyError, ToNameBinding}; use crate::{NameBinding, NameBindingKind, PathResult, PrivacyError, ToNameBinding};
use rustc_ast::unwrap_or;
use rustc_ast::NodeId; use rustc_ast::NodeId;
use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::ptr_key::PtrKey; use rustc_data_structures::ptr_key::PtrKey;
@ -349,10 +348,10 @@ impl<'a> Resolver<'a> {
if !self.is_accessible_from(single_import.vis.get(), parent_scope.module) { if !self.is_accessible_from(single_import.vis.get(), parent_scope.module) {
continue; continue;
} }
let module = unwrap_or!( let module = match single_import.imported_module.get() {
single_import.imported_module.get(), Some(x) => x,
return Err((Undetermined, Weak::No)) None => return Err((Undetermined, Weak::No)),
); };
let ident = match single_import.kind { let ident = match single_import.kind {
ImportKind::Single { source, .. } => source, ImportKind::Single { source, .. } => source,
_ => unreachable!(), _ => unreachable!(),

View file

@ -500,8 +500,8 @@ impl<D: Decoder, const N: usize> Decodable<D> for [u8; N] {
d.read_seq(|d, len| { d.read_seq(|d, len| {
assert!(len == N); assert!(len == N);
let mut v = [0u8; N]; let mut v = [0u8; N];
for i in 0..len { for x in &mut v {
v[i] = d.read_seq_elt(|d| Decodable::decode(d))?; *x = d.read_seq_elt(|d| Decodable::decode(d))?;
} }
Ok(v) Ok(v)
}) })

View file

@ -816,6 +816,7 @@ symbols! {
mem_size_of, mem_size_of,
mem_size_of_val, mem_size_of_val,
mem_uninitialized, mem_uninitialized,
mem_variant_count,
mem_zeroed, mem_zeroed,
member_constraints, member_constraints,
memory, memory,
@ -893,6 +894,7 @@ symbols! {
nomem, nomem,
non_ascii_idents, non_ascii_idents,
non_exhaustive, non_exhaustive,
non_exhaustive_omitted_patterns_lint,
non_modrs_mods, non_modrs_mods,
none_error, none_error,
nontemporal_store, nontemporal_store,

View file

@ -0,0 +1,24 @@
use crate::spec::{Target, TargetOptions};
// This target is for uclibc Linux on ARMv7 without NEON or
// thumb-mode. See the thumbv7neon variant for enabling both.
pub fn target() -> Target {
let base = super::linux_uclibc_base::opts();
Target {
llvm_target: "armv7-unknown-linux-gnueabihf".to_string(),
pointer_width: 32,
data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".to_string(),
arch: "arm".to_string(),
options: TargetOptions {
// Info about features at https://wiki.debian.org/ArmHardFloatPort
features: "+v7,+vfp3,-d32,+thumb2,-neon".to_string(),
cpu: "generic".to_string(),
max_atomic_width: Some(64),
mcount: "_mcount".to_string(),
abi: "eabihf".to_string(),
..base
},
}
}

View file

@ -952,6 +952,8 @@ supported_targets! {
("bpfel-unknown-none", bpfel_unknown_none), ("bpfel-unknown-none", bpfel_unknown_none),
("armv6k-nintendo-3ds", armv6k_nintendo_3ds), ("armv6k-nintendo-3ds", armv6k_nintendo_3ds),
("armv7-unknown-linux-uclibceabihf", armv7_unknown_linux_uclibceabihf),
} }
/// Warnings encountered when parsing the target `json`. /// Warnings encountered when parsing the target `json`.

View file

@ -278,14 +278,14 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> {
fn visit_expr(&mut self, expr: &thir::Expr<'tcx>) { fn visit_expr(&mut self, expr: &thir::Expr<'tcx>) {
self.is_poly |= expr.ty.definitely_has_param_types_or_consts(self.tcx); self.is_poly |= expr.ty.definitely_has_param_types_or_consts(self.tcx);
if self.is_poly == false { if !self.is_poly {
visit::walk_expr(self, expr) visit::walk_expr(self, expr)
} }
} }
fn visit_pat(&mut self, pat: &thir::Pat<'tcx>) { fn visit_pat(&mut self, pat: &thir::Pat<'tcx>) {
self.is_poly |= pat.ty.definitely_has_param_types_or_consts(self.tcx); self.is_poly |= pat.ty.definitely_has_param_types_or_consts(self.tcx);
if self.is_poly == false { if !self.is_poly {
visit::walk_pat(self, pat); visit::walk_pat(self, pat);
} }
} }
@ -298,7 +298,7 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> {
let mut is_poly_vis = IsThirPolymorphic { is_poly: false, thir: body, tcx }; let mut is_poly_vis = IsThirPolymorphic { is_poly: false, thir: body, tcx };
visit::walk_expr(&mut is_poly_vis, &body[body_id]); visit::walk_expr(&mut is_poly_vis, &body[body_id]);
debug!("AbstractConstBuilder: is_poly={}", is_poly_vis.is_poly); debug!("AbstractConstBuilder: is_poly={}", is_poly_vis.is_poly);
if is_poly_vis.is_poly == false { if !is_poly_vis.is_poly {
return Ok(None); return Ok(None);
} }

View file

@ -704,7 +704,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
.filter_map(|lang_item| self.tcx.lang_items().require(*lang_item).ok()) .filter_map(|lang_item| self.tcx.lang_items().require(*lang_item).ok())
.collect(); .collect();
never_suggest_borrow.push(self.tcx.get_diagnostic_item(sym::Send).unwrap()); if let Some(def_id) = self.tcx.get_diagnostic_item(sym::Send) {
never_suggest_borrow.push(def_id);
}
let param_env = obligation.param_env; let param_env = obligation.param_env;
let trait_ref = poly_trait_ref.skip_binder(); let trait_ref = poly_trait_ref.skip_binder();

View file

@ -83,10 +83,10 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'tcx> {
) -> EvaluationResult { ) -> EvaluationResult {
match self.evaluate_obligation(obligation) { match self.evaluate_obligation(obligation) {
Ok(result) => result, Ok(result) => result,
Err(OverflowError::Cannonical) => { Err(OverflowError::Canonical) => {
let mut selcx = SelectionContext::with_query_mode(&self, TraitQueryMode::Standard); let mut selcx = SelectionContext::with_query_mode(&self, TraitQueryMode::Standard);
selcx.evaluate_root_obligation(obligation).unwrap_or_else(|r| match r { selcx.evaluate_root_obligation(obligation).unwrap_or_else(|r| match r {
OverflowError::Cannonical => { OverflowError::Canonical => {
span_bug!( span_bug!(
obligation.cause.span, obligation.cause.span,
"Overflow should be caught earlier in standard query mode: {:?}, {:?}", "Overflow should be caught earlier in standard query mode: {:?}, {:?}",

View file

@ -161,7 +161,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
Ok(Some(EvaluatedCandidate { candidate: c, evaluation: eval })) Ok(Some(EvaluatedCandidate { candidate: c, evaluation: eval }))
} }
Ok(_) => Ok(None), Ok(_) => Ok(None),
Err(OverflowError::Cannonical) => Err(Overflow), Err(OverflowError::Canonical) => Err(Overflow),
Err(OverflowError::ErrorReporting) => Err(ErrorReporting), Err(OverflowError::ErrorReporting) => Err(ErrorReporting),
}) })
.flat_map(Result::transpose) .flat_map(Result::transpose)

View file

@ -900,7 +900,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
match self.candidate_from_obligation(stack) { match self.candidate_from_obligation(stack) {
Ok(Some(c)) => self.evaluate_candidate(stack, &c), Ok(Some(c)) => self.evaluate_candidate(stack, &c),
Ok(None) => Ok(EvaluatedToAmbig), Ok(None) => Ok(EvaluatedToAmbig),
Err(Overflow) => Err(OverflowError::Cannonical), Err(Overflow) => Err(OverflowError::Canonical),
Err(ErrorReporting) => Err(OverflowError::ErrorReporting), Err(ErrorReporting) => Err(OverflowError::ErrorReporting),
Err(..) => Ok(EvaluatedToErr), Err(..) => Ok(EvaluatedToErr),
} }
@ -1064,7 +1064,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
self.infcx.report_overflow_error(error_obligation, true); self.infcx.report_overflow_error(error_obligation, true);
} }
TraitQueryMode::Canonical => { TraitQueryMode::Canonical => {
return Err(OverflowError::Cannonical); return Err(OverflowError::Canonical);
} }
} }
} }

View file

@ -892,7 +892,7 @@ impl<'tcx> TypeVisitor<'tcx> for BoundVarsCollector<'tcx> {
match r { match r {
ty::ReLateBound(index, br) if *index == self.binder_index => match br.kind { ty::ReLateBound(index, br) if *index == self.binder_index => match br.kind {
ty::BoundRegionKind::BrNamed(def_id, _name) => { ty::BoundRegionKind::BrNamed(def_id, _name) => {
if self.named_parameters.iter().find(|d| **d == def_id).is_none() { if !self.named_parameters.iter().any(|d| *d == def_id) {
self.named_parameters.push(def_id); self.named_parameters.push(def_id);
} }
} }

View file

@ -329,7 +329,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let obligation = Obligation::new( let obligation = Obligation::new(
ObligationCause::dummy_with_span(callee_expr.span), ObligationCause::dummy_with_span(callee_expr.span),
self.param_env, self.param_env,
predicate.clone(), *predicate,
); );
let result = self.infcx.evaluate_obligation(&obligation); let result = self.infcx.evaluate_obligation(&obligation);
self.tcx self.tcx

View file

@ -431,7 +431,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
.sess .sess
.source_map() .source_map()
.span_to_snippet(self.expr.span) .span_to_snippet(self.expr.span)
.map_or(false, |snip| snip.starts_with("(")); .map_or(false, |snip| snip.starts_with('('));
// Very crude check to see whether the expression must be wrapped // Very crude check to see whether the expression must be wrapped
// in parentheses for the suggestion to work (issue #89497). // in parentheses for the suggestion to work (issue #89497).

View file

@ -1887,7 +1887,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}; };
let expr_snippet = let expr_snippet =
self.tcx.sess.source_map().span_to_snippet(expr.span).unwrap_or(String::new()); self.tcx.sess.source_map().span_to_snippet(expr.span).unwrap_or(String::new());
let is_wrapped = expr_snippet.starts_with("(") && expr_snippet.ends_with(")"); let is_wrapped = expr_snippet.starts_with('(') && expr_snippet.ends_with(')');
let after_open = expr.span.lo() + rustc_span::BytePos(1); let after_open = expr.span.lo() + rustc_span::BytePos(1);
let before_close = expr.span.hi() - rustc_span::BytePos(1); let before_close = expr.span.hi() - rustc_span::BytePos(1);

View file

@ -753,17 +753,27 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let (impl_ty, impl_substs) = self.impl_ty_and_substs(impl_def_id); let (impl_ty, impl_substs) = self.impl_ty_and_substs(impl_def_id);
let impl_ty = impl_ty.subst(self.tcx, impl_substs); let impl_ty = impl_ty.subst(self.tcx, impl_substs);
debug!("impl_ty: {:?}", impl_ty);
// Determine the receiver type that the method itself expects. // Determine the receiver type that the method itself expects.
let xform_tys = self.xform_self_ty(&item, impl_ty, impl_substs); let (xform_self_ty, xform_ret_ty) = self.xform_self_ty(&item, impl_ty, impl_substs);
debug!("xform_self_ty: {:?}, xform_ret_ty: {:?}", xform_self_ty, xform_ret_ty);
// We can't use normalize_associated_types_in as it will pollute the // We can't use normalize_associated_types_in as it will pollute the
// fcx's fulfillment context after this probe is over. // fcx's fulfillment context after this probe is over.
// Note: we only normalize `xform_self_ty` here since the normalization
// of the return type can lead to inference results that prohibit
// valid canidates from being found, see issue #85671
// FIXME Postponing the normalization of the return type likely only hides a deeper bug,
// which might be caused by the `param_env` itself. The clauses of the `param_env`
// maybe shouldn't include `Param`s, but rather fresh variables or be canonicalized,
// see isssue #89650
let cause = traits::ObligationCause::misc(self.span, self.body_id); let cause = traits::ObligationCause::misc(self.span, self.body_id);
let selcx = &mut traits::SelectionContext::new(self.fcx); let selcx = &mut traits::SelectionContext::new(self.fcx);
let traits::Normalized { value: (xform_self_ty, xform_ret_ty), obligations } = let traits::Normalized { value: xform_self_ty, obligations } =
traits::normalize(selcx, self.param_env, cause, xform_tys); traits::normalize(selcx, self.param_env, cause, xform_self_ty);
debug!( debug!(
"assemble_inherent_impl_probe: xform_self_ty = {:?}/{:?}", "assemble_inherent_impl_probe after normalization: xform_self_ty = {:?}/{:?}",
xform_self_ty, xform_ret_ty xform_self_ty, xform_ret_ty
); );
@ -1420,6 +1430,9 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
}; };
let mut result = ProbeResult::Match; let mut result = ProbeResult::Match;
let mut xform_ret_ty = probe.xform_ret_ty;
debug!(?xform_ret_ty);
let selcx = &mut traits::SelectionContext::new(self); let selcx = &mut traits::SelectionContext::new(self);
let cause = traits::ObligationCause::misc(self.span, self.body_id); let cause = traits::ObligationCause::misc(self.span, self.body_id);
@ -1428,7 +1441,17 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
// match as well (or at least may match, sometimes we // match as well (or at least may match, sometimes we
// don't have enough information to fully evaluate). // don't have enough information to fully evaluate).
match probe.kind { match probe.kind {
InherentImplCandidate(substs, ref ref_obligations) => { InherentImplCandidate(ref substs, ref ref_obligations) => {
// `xform_ret_ty` hasn't been normalized yet, only `xform_self_ty`,
// see the reasons mentioned in the comments in `assemble_inherent_impl_probe`
// for why this is necessary
let traits::Normalized {
value: normalized_xform_ret_ty,
obligations: normalization_obligations,
} = traits::normalize(selcx, self.param_env, cause.clone(), probe.xform_ret_ty);
xform_ret_ty = normalized_xform_ret_ty;
debug!("xform_ret_ty after normalization: {:?}", xform_ret_ty);
// Check whether the impl imposes obligations we have to worry about. // Check whether the impl imposes obligations we have to worry about.
let impl_def_id = probe.item.container.id(); let impl_def_id = probe.item.container.id();
let impl_bounds = self.tcx.predicates_of(impl_def_id); let impl_bounds = self.tcx.predicates_of(impl_def_id);
@ -1442,7 +1465,9 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let candidate_obligations = impl_obligations let candidate_obligations = impl_obligations
.chain(norm_obligations.into_iter()) .chain(norm_obligations.into_iter())
.chain(ref_obligations.iter().cloned()); .chain(ref_obligations.iter().cloned())
.chain(normalization_obligations.into_iter());
// Evaluate those obligations to see if they might possibly hold. // Evaluate those obligations to see if they might possibly hold.
for o in candidate_obligations { for o in candidate_obligations {
let o = self.resolve_vars_if_possible(o); let o = self.resolve_vars_if_possible(o);
@ -1527,9 +1552,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
} }
if let ProbeResult::Match = result { if let ProbeResult::Match = result {
if let (Some(return_ty), Some(xform_ret_ty)) = if let (Some(return_ty), Some(xform_ret_ty)) = (self.return_type, xform_ret_ty) {
(self.return_type, probe.xform_ret_ty)
{
let xform_ret_ty = self.resolve_vars_if_possible(xform_ret_ty); let xform_ret_ty = self.resolve_vars_if_possible(xform_ret_ty);
debug!( debug!(
"comparing return_ty {:?} with xform ret ty {:?}", "comparing return_ty {:?} with xform ret ty {:?}",
@ -1669,6 +1692,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
self.static_candidates.push(source); self.static_candidates.push(source);
} }
#[instrument(level = "debug", skip(self))]
fn xform_self_ty( fn xform_self_ty(
&self, &self,
item: &ty::AssocItem, item: &ty::AssocItem,
@ -1683,9 +1707,10 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
} }
} }
#[instrument(level = "debug", skip(self))]
fn xform_method_sig(&self, method: DefId, substs: SubstsRef<'tcx>) -> ty::FnSig<'tcx> { fn xform_method_sig(&self, method: DefId, substs: SubstsRef<'tcx>) -> ty::FnSig<'tcx> {
let fn_sig = self.tcx.fn_sig(method); let fn_sig = self.tcx.fn_sig(method);
debug!("xform_self_ty(fn_sig={:?}, substs={:?})", fn_sig, substs); debug!(?fn_sig);
assert!(!substs.has_escaping_bound_vars()); assert!(!substs.has_escaping_bound_vars());

View file

@ -413,7 +413,7 @@ impl<'a, 'tcx> Visitor<'tcx> for RegionCtxt<'a, 'tcx> {
} }
hir::ExprKind::Match(ref discr, arms, _) => { hir::ExprKind::Match(ref discr, arms, _) => {
self.link_match(discr, &arms[..]); self.link_match(discr, arms);
intravisit::walk_expr(self, expr); intravisit::walk_expr(self, expr);
} }

View file

@ -65,6 +65,7 @@ use std::iter;
enum PlaceAncestryRelation { enum PlaceAncestryRelation {
Ancestor, Ancestor,
Descendant, Descendant,
SamePlace,
Divergent, Divergent,
} }
@ -564,7 +565,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
for possible_ancestor in min_cap_list.iter_mut() { for possible_ancestor in min_cap_list.iter_mut() {
match determine_place_ancestry_relation(&place, &possible_ancestor.place) { match determine_place_ancestry_relation(&place, &possible_ancestor.place) {
// current place is descendant of possible_ancestor // current place is descendant of possible_ancestor
PlaceAncestryRelation::Descendant => { PlaceAncestryRelation::Descendant | PlaceAncestryRelation::SamePlace => {
ancestor_found = true; ancestor_found = true;
let backup_path_expr_id = possible_ancestor.info.path_expr_id; let backup_path_expr_id = possible_ancestor.info.path_expr_id;
@ -2278,15 +2279,17 @@ fn determine_place_ancestry_relation(
let projections_b = &place_b.projections; let projections_b = &place_b.projections;
let same_initial_projections = let same_initial_projections =
iter::zip(projections_a, projections_b).all(|(proj_a, proj_b)| proj_a == proj_b); iter::zip(projections_a, projections_b).all(|(proj_a, proj_b)| proj_a.kind == proj_b.kind);
if same_initial_projections { if same_initial_projections {
use std::cmp::Ordering;
// First min(n, m) projections are the same // First min(n, m) projections are the same
// Select Ancestor/Descendant // Select Ancestor/Descendant
if projections_b.len() >= projections_a.len() { match projections_b.len().cmp(&projections_a.len()) {
PlaceAncestryRelation::Ancestor Ordering::Greater => PlaceAncestryRelation::Ancestor,
} else { Ordering::Equal => PlaceAncestryRelation::SamePlace,
PlaceAncestryRelation::Descendant Ordering::Less => PlaceAncestryRelation::Descendant,
} }
} else { } else {
PlaceAncestryRelation::Divergent PlaceAncestryRelation::Divergent

View file

@ -3,6 +3,7 @@ use rustc_errors::struct_span_err;
use rustc_hir as hir; use rustc_hir as hir;
use rustc_hir::def_id::DefId; use rustc_hir::def_id::DefId;
use rustc_hir::itemlikevisit::ItemLikeVisitor; use rustc_hir::itemlikevisit::ItemLikeVisitor;
use rustc_index::vec::IndexVec;
use rustc_middle::ty::{self, TyCtxt}; use rustc_middle::ty::{self, TyCtxt};
use rustc_span::Symbol; use rustc_span::Symbol;
use rustc_trait_selection::traits::{self, SkipLeakCheck}; use rustc_trait_selection::traits::{self, SkipLeakCheck};
@ -158,14 +159,18 @@ impl ItemLikeVisitor<'v> for InherentOverlapChecker<'tcx> {
// This is advantageous to running the algorithm over the // This is advantageous to running the algorithm over the
// entire graph when there are many connected regions. // entire graph when there are many connected regions.
rustc_index::newtype_index! {
pub struct RegionId {
ENCODABLE = custom
}
}
struct ConnectedRegion { struct ConnectedRegion {
idents: SmallVec<[Symbol; 8]>, idents: SmallVec<[Symbol; 8]>,
impl_blocks: FxHashSet<usize>, impl_blocks: FxHashSet<usize>,
} }
// Highest connected region id let mut connected_regions: IndexVec<RegionId, _> = Default::default();
let mut highest_region_id = 0; // Reverse map from the Symbol to the connected region id.
let mut connected_region_ids = FxHashMap::default(); let mut connected_region_ids = FxHashMap::default();
let mut connected_regions = FxHashMap::default();
for (i, &(&_impl_def_id, impl_items)) in impls_items.iter().enumerate() { for (i, &(&_impl_def_id, impl_items)) in impls_items.iter().enumerate() {
if impl_items.len() == 0 { if impl_items.len() == 0 {
@ -173,7 +178,7 @@ impl ItemLikeVisitor<'v> for InherentOverlapChecker<'tcx> {
} }
// First obtain a list of existing connected region ids // First obtain a list of existing connected region ids
let mut idents_to_add = SmallVec::<[Symbol; 8]>::new(); let mut idents_to_add = SmallVec::<[Symbol; 8]>::new();
let ids = impl_items let mut ids = impl_items
.in_definition_order() .in_definition_order()
.filter_map(|item| { .filter_map(|item| {
let entry = connected_region_ids.entry(item.ident.name); let entry = connected_region_ids.entry(item.ident.name);
@ -184,62 +189,64 @@ impl ItemLikeVisitor<'v> for InherentOverlapChecker<'tcx> {
None None
} }
}) })
.collect::<FxHashSet<usize>>(); .collect::<SmallVec<[RegionId; 8]>>();
match ids.len() { // Sort the id list so that the algorithm is deterministic
0 | 1 => { ids.sort_unstable();
let id_to_set = if ids.is_empty() { let ids = ids;
match &ids[..] {
// Create a new connected region // Create a new connected region
let region = ConnectedRegion { [] => {
let id_to_set = connected_regions.next_index();
// Update the connected region ids
for ident in &idents_to_add {
connected_region_ids.insert(*ident, id_to_set);
}
connected_regions.insert(
id_to_set,
ConnectedRegion {
idents: idents_to_add, idents: idents_to_add,
impl_blocks: std::iter::once(i).collect(), impl_blocks: std::iter::once(i).collect(),
}; },
connected_regions.insert(highest_region_id, region); );
(highest_region_id, highest_region_id += 1).0 }
} else {
// Take the only id inside the list // Take the only id inside the list
let id_to_set = *ids.iter().next().unwrap(); &[id_to_set] => {
let region = connected_regions.get_mut(&id_to_set).unwrap(); let region = connected_regions[id_to_set].as_mut().unwrap();
region.impl_blocks.insert(i); region.impl_blocks.insert(i);
region.idents.extend_from_slice(&idents_to_add); region.idents.extend_from_slice(&idents_to_add);
id_to_set
};
let (_id, region) = connected_regions.iter().next().unwrap();
// Update the connected region ids // Update the connected region ids
for ident in region.idents.iter() { for ident in &idents_to_add {
connected_region_ids.insert(*ident, id_to_set); connected_region_ids.insert(*ident, id_to_set);
} }
} }
_ => {
// We have multiple connected regions to merge. // We have multiple connected regions to merge.
// In the worst case this might add impl blocks // In the worst case this might add impl blocks
// one by one and can thus be O(n^2) in the size // one by one and can thus be O(n^2) in the size
// of the resulting final connected region, but // of the resulting final connected region, but
// this is no issue as the final step to check // this is no issue as the final step to check
// for overlaps runs in O(n^2) as well. // for overlaps runs in O(n^2) as well.
&[id_to_set, ..] => {
// Take the smallest id from the list let mut region = connected_regions.remove(id_to_set).unwrap();
let id_to_set = *ids.iter().min().unwrap();
// Sort the id list so that the algorithm is deterministic
let mut ids = ids.into_iter().collect::<SmallVec<[usize; 8]>>();
ids.sort_unstable();
let mut region = connected_regions.remove(&id_to_set).unwrap();
region.idents.extend_from_slice(&idents_to_add);
region.impl_blocks.insert(i); region.impl_blocks.insert(i);
region.idents.extend_from_slice(&idents_to_add);
// Update the connected region ids
for ident in &idents_to_add {
connected_region_ids.insert(*ident, id_to_set);
}
// Remove other regions from ids.
for &id in ids.iter() { for &id in ids.iter() {
if id == id_to_set { if id == id_to_set {
continue; continue;
} }
let r = connected_regions.remove(&id).unwrap(); let r = connected_regions.remove(id).unwrap();
// Update the connected region ids
for ident in r.idents.iter() { for ident in r.idents.iter() {
connected_region_ids.insert(*ident, id_to_set); connected_region_ids.insert(*ident, id_to_set);
} }
region.idents.extend_from_slice(&r.idents); region.idents.extend_from_slice(&r.idents);
region.impl_blocks.extend(r.impl_blocks); region.impl_blocks.extend(r.impl_blocks);
} }
connected_regions.insert(id_to_set, region); connected_regions.insert(id_to_set, region);
} }
} }
@ -254,16 +261,22 @@ impl ItemLikeVisitor<'v> for InherentOverlapChecker<'tcx> {
let avg = impls.len() / connected_regions.len(); let avg = impls.len() / connected_regions.len();
let s = connected_regions let s = connected_regions
.iter() .iter()
.map(|r| r.1.impl_blocks.len() as isize - avg as isize) .flatten()
.map(|r| r.impl_blocks.len() as isize - avg as isize)
.map(|v| v.abs() as usize) .map(|v| v.abs() as usize)
.sum::<usize>(); .sum::<usize>();
s / connected_regions.len() s / connected_regions.len()
}, },
connected_regions.iter().map(|r| r.1.impl_blocks.len()).max().unwrap() connected_regions
.iter()
.flatten()
.map(|r| r.impl_blocks.len())
.max()
.unwrap()
); );
// List of connected regions is built. Now, run the overlap check // List of connected regions is built. Now, run the overlap check
// for each pair of impl blocks in the same connected region. // for each pair of impl blocks in the same connected region.
for (_id, region) in connected_regions.into_iter() { for region in connected_regions.into_iter().flatten() {
let mut impl_blocks = let mut impl_blocks =
region.impl_blocks.into_iter().collect::<SmallVec<[usize; 8]>>(); region.impl_blocks.into_iter().collect::<SmallVec<[usize; 8]>>();
impl_blocks.sort_unstable(); impl_blocks.sort_unstable();

View file

@ -63,6 +63,7 @@ This API is completely unstable and subject to change.
#![feature(in_band_lifetimes)] #![feature(in_band_lifetimes)]
#![feature(is_sorted)] #![feature(is_sorted)]
#![feature(iter_zip)] #![feature(iter_zip)]
#![feature(min_specialization)]
#![feature(nll)] #![feature(nll)]
#![feature(try_blocks)] #![feature(try_blocks)]
#![feature(never_type)] #![feature(never_type)]

View file

@ -187,6 +187,7 @@ impl<T> Box<T> {
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[inline(always)] #[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
pub fn new(x: T) -> Self { pub fn new(x: T) -> Self {
box x box x
} }
@ -211,6 +212,7 @@ impl<T> Box<T> {
/// ``` /// ```
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[unstable(feature = "new_uninit", issue = "63291")] #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
#[inline] #[inline]
pub fn new_uninit() -> Box<mem::MaybeUninit<T>> { pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
Self::new_uninit_in(Global) Self::new_uninit_in(Global)
@ -237,6 +239,7 @@ impl<T> Box<T> {
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[inline] #[inline]
#[unstable(feature = "new_uninit", issue = "63291")] #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> { pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
Self::new_zeroed_in(Global) Self::new_zeroed_in(Global)
} }
@ -245,6 +248,7 @@ impl<T> Box<T> {
/// `x` will be pinned in memory and unable to be moved. /// `x` will be pinned in memory and unable to be moved.
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[stable(feature = "pin", since = "1.33.0")] #[stable(feature = "pin", since = "1.33.0")]
#[must_use]
#[inline(always)] #[inline(always)]
pub fn pin(x: T) -> Pin<Box<T>> { pub fn pin(x: T) -> Pin<Box<T>> {
(box x).into() (box x).into()
@ -339,6 +343,7 @@ impl<T, A: Allocator> Box<T, A> {
/// ``` /// ```
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[unstable(feature = "allocator_api", issue = "32838")] #[unstable(feature = "allocator_api", issue = "32838")]
#[must_use]
#[inline] #[inline]
pub fn new_in(x: T, alloc: A) -> Self { pub fn new_in(x: T, alloc: A) -> Self {
let mut boxed = Self::new_uninit_in(alloc); let mut boxed = Self::new_uninit_in(alloc);
@ -395,6 +400,7 @@ impl<T, A: Allocator> Box<T, A> {
/// ``` /// ```
#[unstable(feature = "allocator_api", issue = "32838")] #[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[must_use]
// #[unstable(feature = "new_uninit", issue = "63291")] // #[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> { pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
let layout = Layout::new::<mem::MaybeUninit<T>>(); let layout = Layout::new::<mem::MaybeUninit<T>>();
@ -459,6 +465,7 @@ impl<T, A: Allocator> Box<T, A> {
#[unstable(feature = "allocator_api", issue = "32838")] #[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
// #[unstable(feature = "new_uninit", issue = "63291")] // #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> { pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
let layout = Layout::new::<mem::MaybeUninit<T>>(); let layout = Layout::new::<mem::MaybeUninit<T>>();
// NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
@ -503,6 +510,7 @@ impl<T, A: Allocator> Box<T, A> {
/// `x` will be pinned in memory and unable to be moved. /// `x` will be pinned in memory and unable to be moved.
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[unstable(feature = "allocator_api", issue = "32838")] #[unstable(feature = "allocator_api", issue = "32838")]
#[must_use]
#[inline(always)] #[inline(always)]
pub fn pin_in(x: T, alloc: A) -> Pin<Self> pub fn pin_in(x: T, alloc: A) -> Pin<Self>
where where
@ -561,6 +569,7 @@ impl<T> Box<[T]> {
/// ``` /// ```
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[unstable(feature = "new_uninit", issue = "63291")] #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> { pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
unsafe { RawVec::with_capacity(len).into_box(len) } unsafe { RawVec::with_capacity(len).into_box(len) }
} }
@ -585,6 +594,7 @@ impl<T> Box<[T]> {
/// [zeroed]: mem::MaybeUninit::zeroed /// [zeroed]: mem::MaybeUninit::zeroed
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[unstable(feature = "new_uninit", issue = "63291")] #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> { pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
unsafe { RawVec::with_capacity_zeroed(len).into_box(len) } unsafe { RawVec::with_capacity_zeroed(len).into_box(len) }
} }
@ -681,6 +691,7 @@ impl<T, A: Allocator> Box<[T], A> {
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[unstable(feature = "allocator_api", issue = "32838")] #[unstable(feature = "allocator_api", issue = "32838")]
// #[unstable(feature = "new_uninit", issue = "63291")] // #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> { pub fn new_uninit_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> {
unsafe { RawVec::with_capacity_in(len, alloc).into_box(len) } unsafe { RawVec::with_capacity_in(len, alloc).into_box(len) }
} }
@ -708,6 +719,7 @@ impl<T, A: Allocator> Box<[T], A> {
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[unstable(feature = "allocator_api", issue = "32838")] #[unstable(feature = "allocator_api", issue = "32838")]
// #[unstable(feature = "new_uninit", issue = "63291")] // #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> { pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Box<[mem::MaybeUninit<T>], A> {
unsafe { RawVec::with_capacity_zeroed_in(len, alloc).into_box(len) } unsafe { RawVec::with_capacity_zeroed_in(len, alloc).into_box(len) }
} }
@ -1277,6 +1289,7 @@ impl<T> From<T> for Box<T> {
/// from the stack into it. /// from the stack into it.
/// ///
/// # Examples /// # Examples
///
/// ```rust /// ```rust
/// let x = 5; /// let x = 5;
/// let boxed = Box::new(5); /// let boxed = Box::new(5);
@ -1330,6 +1343,12 @@ impl<T: Copy> From<&[T]> for Box<[T]> {
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[stable(feature = "box_from_cow", since = "1.45.0")] #[stable(feature = "box_from_cow", since = "1.45.0")]
impl<T: Copy> From<Cow<'_, [T]>> for Box<[T]> { impl<T: Copy> From<Cow<'_, [T]>> for Box<[T]> {
/// Converts a `Cow<'_, [T]>` into a `Box<[T]>`
///
/// When `cow` is the `Cow::Borrowed` variant, this
/// conversion allocates on the heap and copies the
/// underlying slice. Otherwise, it will try to reuse the owned
/// `Vec`'s allocation.
#[inline] #[inline]
fn from(cow: Cow<'_, [T]>) -> Box<[T]> { fn from(cow: Cow<'_, [T]>) -> Box<[T]> {
match cow { match cow {
@ -1348,6 +1367,7 @@ impl From<&str> for Box<str> {
/// and performs a copy of `s`. /// and performs a copy of `s`.
/// ///
/// # Examples /// # Examples
///
/// ```rust /// ```rust
/// let boxed: Box<str> = Box::from("hello"); /// let boxed: Box<str> = Box::from("hello");
/// println!("{}", boxed); /// println!("{}", boxed);
@ -1361,6 +1381,29 @@ impl From<&str> for Box<str> {
#[cfg(not(no_global_oom_handling))] #[cfg(not(no_global_oom_handling))]
#[stable(feature = "box_from_cow", since = "1.45.0")] #[stable(feature = "box_from_cow", since = "1.45.0")]
impl From<Cow<'_, str>> for Box<str> { impl From<Cow<'_, str>> for Box<str> {
/// Converts a `Cow<'_, str>` into a `Box<str>`
///
/// When `cow` is the `Cow::Borrowed` variant, this
/// conversion allocates on the heap and copies the
/// underlying `str`. Otherwise, it will try to reuse the owned
/// `String`'s allocation.
///
/// # Examples
///
/// ```rust
/// use std::borrow::Cow;
///
/// let unboxed = Cow::Borrowed("hello");
/// let boxed: Box<str> = Box::from(unboxed);
/// println!("{}", boxed);
/// ```
///
/// ```rust
/// # use std::borrow::Cow;
/// let unboxed = Cow::Owned("hello".to_string());
/// let boxed: Box<str> = Box::from(unboxed);
/// println!("{}", boxed);
/// ```
#[inline] #[inline]
fn from(cow: Cow<'_, str>) -> Box<str> { fn from(cow: Cow<'_, str>) -> Box<str> {
match cow { match cow {
@ -1403,6 +1446,7 @@ impl<T, const N: usize> From<[T; N]> for Box<[T]> {
/// This conversion moves the array to newly heap-allocated memory. /// This conversion moves the array to newly heap-allocated memory.
/// ///
/// # Examples /// # Examples
///
/// ```rust /// ```rust
/// let boxed: Box<[u8]> = Box::from([4, 2]); /// let boxed: Box<[u8]> = Box::from([4, 2]);
/// println!("{:?}", boxed); /// println!("{:?}", boxed);
@ -1416,6 +1460,15 @@ impl<T, const N: usize> From<[T; N]> for Box<[T]> {
impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> { impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
type Error = Box<[T]>; type Error = Box<[T]>;
/// Attempts to convert a `Box<[T]>` into a `Box<[T; N]>`.
///
/// The conversion occurs in-place and does not require a
/// new memory allocation.
///
/// # Errors
///
/// Returns the old `Box<[T]>` in the `Err` variant if
/// `boxed_slice.len()` does not equal `N`.
fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> { fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> {
if boxed_slice.len() == N { if boxed_slice.len() == N {
Ok(unsafe { Box::from_raw(Box::into_raw(boxed_slice) as *mut [T; N]) }) Ok(unsafe { Box::from_raw(Box::into_raw(boxed_slice) as *mut [T; N]) })

View file

@ -364,6 +364,7 @@ impl<T: Ord> BinaryHeap<T> {
/// heap.push(4); /// heap.push(4);
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
pub fn new() -> BinaryHeap<T> { pub fn new() -> BinaryHeap<T> {
BinaryHeap { data: vec![] } BinaryHeap { data: vec![] }
} }
@ -383,6 +384,7 @@ impl<T: Ord> BinaryHeap<T> {
/// heap.push(4); /// heap.push(4);
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
pub fn with_capacity(capacity: usize) -> BinaryHeap<T> { pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
BinaryHeap { data: Vec::with_capacity(capacity) } BinaryHeap { data: Vec::with_capacity(capacity) }
} }
@ -848,6 +850,7 @@ impl<T> BinaryHeap<T> {
/// ///
/// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]); /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]);
/// ``` /// ```
#[must_use = "`self` will be dropped if the result is not used"]
#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")] #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
pub fn into_iter_sorted(self) -> IntoIterSorted<T> { pub fn into_iter_sorted(self) -> IntoIterSorted<T> {
IntoIterSorted { inner: self } IntoIterSorted { inner: self }
@ -1006,6 +1009,7 @@ impl<T> BinaryHeap<T> {
/// ///
/// io::sink().write(heap.as_slice()).unwrap(); /// io::sink().write(heap.as_slice()).unwrap();
/// ``` /// ```
#[must_use]
#[unstable(feature = "binary_heap_as_slice", issue = "83659")] #[unstable(feature = "binary_heap_as_slice", issue = "83659")]
pub fn as_slice(&self) -> &[T] { pub fn as_slice(&self) -> &[T] {
self.data.as_slice() self.data.as_slice()
@ -1028,6 +1032,7 @@ impl<T> BinaryHeap<T> {
/// println!("{}", x); /// println!("{}", x);
/// } /// }
/// ``` /// ```
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")] #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
pub fn into_vec(self) -> Vec<T> { pub fn into_vec(self) -> Vec<T> {
self.into() self.into()

View file

@ -502,6 +502,7 @@ impl<K, V> BTreeMap<K, V> {
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_btree_new", issue = "71835")] #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
#[must_use]
pub const fn new() -> BTreeMap<K, V> { pub const fn new() -> BTreeMap<K, V> {
BTreeMap { root: None, length: 0 } BTreeMap { root: None, length: 0 }
} }
@ -1264,6 +1265,7 @@ impl<K, V> BTreeMap<K, V> {
/// assert_eq!(keys, [1, 2]); /// assert_eq!(keys, [1, 2]);
/// ``` /// ```
#[inline] #[inline]
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(feature = "map_into_keys_values", since = "1.54.0")] #[stable(feature = "map_into_keys_values", since = "1.54.0")]
pub fn into_keys(self) -> IntoKeys<K, V> { pub fn into_keys(self) -> IntoKeys<K, V> {
IntoKeys { inner: self.into_iter() } IntoKeys { inner: self.into_iter() }
@ -1286,6 +1288,7 @@ impl<K, V> BTreeMap<K, V> {
/// assert_eq!(values, ["hello", "goodbye"]); /// assert_eq!(values, ["hello", "goodbye"]);
/// ``` /// ```
#[inline] #[inline]
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(feature = "map_into_keys_values", since = "1.54.0")] #[stable(feature = "map_into_keys_values", since = "1.54.0")]
pub fn into_values(self) -> IntoValues<K, V> { pub fn into_values(self) -> IntoValues<K, V> {
IntoValues { inner: self.into_iter() } IntoValues { inner: self.into_iter() }

View file

@ -448,6 +448,7 @@ impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
/// } /// }
/// assert_eq!(map["poneyland"], 22); /// assert_eq!(map["poneyland"], 22);
/// ``` /// ```
#[must_use = "`self` will be dropped if the result is not used"]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn into_mut(self) -> &'a mut V { pub fn into_mut(self) -> &'a mut V {
self.handle.into_val_mut() self.handle.into_val_mut()

View file

@ -1755,20 +1755,20 @@ fn test_send() {
#[test] #[test]
fn test_ord_absence() { fn test_ord_absence() {
fn map<K>(mut map: BTreeMap<K, ()>) { fn map<K>(mut map: BTreeMap<K, ()>) {
map.is_empty(); let _ = map.is_empty();
map.len(); let _ = map.len();
map.clear(); map.clear();
map.iter(); let _ = map.iter();
map.iter_mut(); let _ = map.iter_mut();
map.keys(); let _ = map.keys();
map.values(); let _ = map.values();
map.values_mut(); let _ = map.values_mut();
if true { if true {
map.into_values(); let _ = map.into_values();
} else if true { } else if true {
map.into_iter(); let _ = map.into_iter();
} else { } else {
map.into_keys(); let _ = map.into_keys();
} }
} }

View file

@ -248,6 +248,7 @@ impl<T> BTreeSet<T> {
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_btree_new", issue = "71835")] #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
#[must_use]
pub const fn new() -> BTreeSet<T> { pub const fn new() -> BTreeSet<T> {
BTreeSet { map: BTreeMap::new() } BTreeSet { map: BTreeMap::new() }
} }
@ -534,6 +535,7 @@ impl<T> BTreeSet<T> {
/// b.insert(1); /// b.insert(1);
/// assert_eq!(a.is_disjoint(&b), false); /// assert_eq!(a.is_disjoint(&b), false);
/// ``` /// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool
where where
@ -559,6 +561,7 @@ impl<T> BTreeSet<T> {
/// set.insert(4); /// set.insert(4);
/// assert_eq!(set.is_subset(&sup), false); /// assert_eq!(set.is_subset(&sup), false);
/// ``` /// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn is_subset(&self, other: &BTreeSet<T>) -> bool pub fn is_subset(&self, other: &BTreeSet<T>) -> bool
where where
@ -638,6 +641,7 @@ impl<T> BTreeSet<T> {
/// set.insert(2); /// set.insert(2);
/// assert_eq!(set.is_superset(&sub), true); /// assert_eq!(set.is_superset(&sub), true);
/// ``` /// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn is_superset(&self, other: &BTreeSet<T>) -> bool pub fn is_superset(&self, other: &BTreeSet<T>) -> bool
where where

Some files were not shown because too many files have changed in this diff Show more