
Auto merge of #135005 - matthiaskrgr:rollup-5ubuitt, r=matthiaskrgr

Rollup of 5 pull requests

Successful merges:

 - #134967 (handle submodules automatically on `doc` steps)
 - #134973 (Fix typos)
 - #134984 (`ObligationCause` construction tweaks in typeck)
 - #134985 (Remove qualification of `std::cmp::Ordering` in `Ord` doc)
 - #135000 (Fix ICE when opaque captures a duplicated/invalid lifetime)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2025-01-01 21:37:33 +00:00
commit 62b13a9019
26 changed files with 129 additions and 109 deletions

@@ -1845,11 +1845,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             GenericParamKind::Lifetime => {
                 // AST resolution emitted an error on those parameters, so we lower them using
                 // `ParamName::Error`.
+                let ident = self.lower_ident(param.ident);
                 let param_name =
                     if let Some(LifetimeRes::Error) = self.resolver.get_lifetime_res(param.id) {
-                        ParamName::Error
+                        ParamName::Error(ident)
                     } else {
-                        let ident = self.lower_ident(param.ident);
                         ParamName::Plain(ident)
                     };
                 let kind =

@@ -52,6 +52,13 @@ pub enum ParamName {
     /// Some user-given name like `T` or `'x`.
     Plain(Ident),
+    /// Indicates an illegal name was given and an error has been
+    /// reported (so we should squelch other derived errors).
+    ///
+    /// Occurs when, e.g., `'_` is used in the wrong place, or a
+    /// lifetime name is duplicated.
+    Error(Ident),
     /// Synthetic name generated when user elided a lifetime in an impl header.
     ///
     /// E.g., the lifetimes in cases like these:
@@ -67,18 +74,13 @@ pub enum ParamName {
     /// where `'f` is something like `Fresh(0)`. The indices are
     /// unique per impl, but not necessarily continuous.
     Fresh,
-    /// Indicates an illegal name was given and an error has been
-    /// reported (so we should squelch other derived errors). Occurs
-    /// when, e.g., `'_` is used in the wrong place.
-    Error,
 }

 impl ParamName {
     pub fn ident(&self) -> Ident {
         match *self {
-            ParamName::Plain(ident) => ident,
-            ParamName::Fresh | ParamName::Error => Ident::with_dummy_span(kw::UnderscoreLifetime),
+            ParamName::Plain(ident) | ParamName::Error(ident) => ident,
+            ParamName::Fresh => Ident::with_dummy_span(kw::UnderscoreLifetime),
         }
     }
 }
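
The payoff of the new `Error(Ident)` payload is visible in `ident()`: an erroneous lifetime now reports under the name the user actually wrote instead of a dummy `'_`. A small self-contained sketch of that behaviour, with `&str` standing in for the real `Ident` type (names and types here are illustrative, not the actual HIR definitions):

// Toy model of the hir::ParamName change above; `&str` stands in for `Ident`.
#[derive(Clone, Copy)]
enum ParamName<'a> {
    Plain(&'a str),
    Error(&'a str),
    Fresh,
}

impl<'a> ParamName<'a> {
    fn ident(&self) -> &'a str {
        match *self {
            // An erroneous parameter now keeps the user-written name...
            ParamName::Plain(ident) | ParamName::Error(ident) => ident,
            // ...and only elided lifetimes fall back to the dummy `'_`.
            ParamName::Fresh => "'_",
        }
    }
}

fn main() {
    let plain = ParamName::Plain("'x");
    let duplicated = ParamName::Error("'a");
    assert_eq!(plain.ident(), "'x");
    assert_eq!(duplicated.ident(), "'a");
    assert_eq!(ParamName::Fresh.ident(), "'_");
}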

@@ -928,8 +928,8 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>(
 ) -> V::Result {
     try_visit!(visitor.visit_id(param.hir_id));
     match param.name {
-        ParamName::Plain(ident) => try_visit!(visitor.visit_ident(ident)),
-        ParamName::Error | ParamName::Fresh => {}
+        ParamName::Plain(ident) | ParamName::Error(ident) => try_visit!(visitor.visit_ident(ident)),
+        ParamName::Fresh => {}
     }
     match param.kind {
         GenericParamKind::Lifetime { .. } => {}

@@ -2007,7 +2007,10 @@ fn check_variances_for_type_defn<'tcx>(
         }

         match hir_param.name {
-            hir::ParamName::Error => {}
+            hir::ParamName::Error(_) => {
+                // Don't report a bivariance error for a lifetime that isn't
+                // even valid to name.
+            }
             _ => {
                 let has_explicit_bounds = explicitly_bounded_params.contains(&parameter);
                 report_bivariance(tcx, hir_param, has_explicit_bounds, item);

@@ -7,7 +7,7 @@ use rustc_hir::def_id::DefId;
 use rustc_hir::{self as hir, HirId, LangItem};
 use rustc_hir_analysis::autoderef::Autoderef;
 use rustc_infer::infer;
-use rustc_infer::traits::{self, Obligation, ObligationCause, ObligationCauseCode};
+use rustc_infer::traits::{Obligation, ObligationCause, ObligationCauseCode};
 use rustc_middle::ty::adjustment::{
     Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability,
 };
@@ -512,7 +512,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             self.register_bound(
                 ty,
                 self.tcx.require_lang_item(hir::LangItem::Tuple, Some(sp)),
-                traits::ObligationCause::new(sp, self.body_id, ObligationCauseCode::RustCall),
+                self.cause(sp, ObligationCauseCode::RustCall),
             );
             self.require_type_is_sized(ty, sp, ObligationCauseCode::RustCall);
         } else {

@@ -580,11 +580,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
         let mut selcx = traits::SelectionContext::new(self);

         // Create an obligation for `Source: CoerceUnsized<Target>`.
-        let cause =
-            ObligationCause::new(self.cause.span, self.body_id, ObligationCauseCode::Coercion {
-                source,
-                target,
-            });
+        let cause = self.cause(self.cause.span, ObligationCauseCode::Coercion { source, target });

         // Use a FIFO queue for this custom fulfillment procedure.
         //

@@ -22,7 +22,6 @@ use rustc_hir::{ExprKind, HirId, QPath};
 use rustc_hir_analysis::hir_ty_lowering::{FeedConstTy, HirTyLowerer as _};
 use rustc_infer::infer;
 use rustc_infer::infer::{DefineOpaqueTypes, InferOk};
-use rustc_infer::traits::ObligationCause;
 use rustc_infer::traits::query::NoSolution;
 use rustc_middle::ty::adjustment::{Adjust, Adjustment, AllowTwoPhase};
 use rustc_middle::ty::error::{ExpectedFound, TypeError};
@@ -1174,9 +1173,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         for err in errors {
             let cause = &mut err.obligation.cause;
             if let ObligationCauseCode::OpaqueReturnType(None) = cause.code() {
-                let new_cause = ObligationCause::new(
+                let new_cause = self.cause(
                     cause.span,
-                    cause.body_id,
                     ObligationCauseCode::OpaqueReturnType(Some((return_expr_ty, hir_id))),
                 );
                 *cause = new_cause;
@@ -3856,7 +3854,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                         // Enums are anyway always sized. But just to safeguard against future
                         // language extensions, let's double-check.
-                        self.require_type_is_sized(field_ty, expr.span, ObligationCauseCode::Misc);
+                        self.require_type_is_sized(
+                            field_ty,
+                            expr.span,
+                            ObligationCauseCode::FieldSized {
+                                adt_kind: AdtKind::Enum,
+                                span: self.tcx.def_span(field.did),
+                                last: false,
+                            },
+                        );

                         if field.vis.is_accessible_from(sub_def_scope, self.tcx) {
                             self.tcx.check_stability(field.did, Some(expr.hir_id), expr.span, None);
@@ -3884,11 +3890,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     let field_ty = self.field_ty(expr.span, field, args);

                     if self.tcx.features().offset_of_slice() {
-                        self.require_type_has_static_alignment(
-                            field_ty,
-                            expr.span,
-                            ObligationCauseCode::Misc,
-                        );
+                        self.require_type_has_static_alignment(field_ty, expr.span);
                     } else {
                         self.require_type_is_sized(
                             field_ty,
@@ -3917,11 +3919,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 {
                     if let Some(&field_ty) = tys.get(index) {
                         if self.tcx.features().offset_of_slice() {
-                            self.require_type_has_static_alignment(
-                                field_ty,
-                                expr.span,
-                                ObligationCauseCode::Misc,
-                            );
+                            self.require_type_has_static_alignment(field_ty, expr.span);
                         } else {
                             self.require_type_is_sized(
                                 field_ty,

@@ -384,7 +384,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         code: traits::ObligationCauseCode<'tcx>,
         def_id: DefId,
     ) {
-        self.register_bound(ty, def_id, traits::ObligationCause::new(span, self.body_id, code));
+        self.register_bound(ty, def_id, self.cause(span, code));
     }

     pub(crate) fn require_type_is_sized(
@@ -410,12 +410,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         }
     }

-    pub(crate) fn require_type_has_static_alignment(
-        &self,
-        ty: Ty<'tcx>,
-        span: Span,
-        code: traits::ObligationCauseCode<'tcx>,
-    ) {
+    pub(crate) fn require_type_has_static_alignment(&self, ty: Ty<'tcx>, span: Span) {
         if !ty.references_error() {
             let tail = self.tcx.struct_tail_raw(
                 ty,
@@ -434,7 +429,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             } else {
                 // We can't be sure, let's required full `Sized`.
                 let lang_item = self.tcx.require_lang_item(LangItem::Sized, None);
-                self.require_type_meets(ty, span, code, lang_item);
+                self.require_type_meets(ty, span, ObligationCauseCode::Misc, lang_item);
             }
         }
     }
@@ -572,7 +567,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         code: traits::ObligationCauseCode<'tcx>,
     ) {
         // WF obligations never themselves fail, so no real need to give a detailed cause:
-        let cause = traits::ObligationCause::new(span, self.body_id, code);
+        let cause = self.cause(span, code);
         self.register_predicate(traits::Obligation::new(
             self.tcx,
             cause,
@@ -1426,9 +1421,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let bounds = self.instantiate_bounds(span, def_id, args);

         for obligation in traits::predicates_for_generics(
-            |idx, predicate_span| {
-                traits::ObligationCause::new(span, self.body_id, code(idx, predicate_span))
-            },
+            |idx, predicate_span| self.cause(span, code(idx, predicate_span)),
             param_env,
             bounds,
         ) {
@@ -1561,7 +1554,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         query_result: &Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>,
     ) -> InferResult<'tcx, Ty<'tcx>> {
         self.instantiate_query_response_and_region_obligations(
-            &traits::ObligationCause::misc(span, self.body_id),
+            &self.misc(span),
             self.param_env,
             original_values,
             query_result,
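
Every `ObligationCause` hunk in this rollup item is the same mechanical rewrite: the span-plus-`body_id` construction moves behind the `cause` and `misc` helpers on `FnCtxt`, so call sites pass only a span and a cause code. A rough, self-contained sketch of that shape, using simplified stand-in types rather than the real rustc ones (the fixed `"Misc"` code in `misc` is an assumption for illustration):

// Simplified stand-ins for ObligationCause / FnCtxt, just to show the refactor shape.
#[derive(Debug, PartialEq)]
struct ObligationCause {
    span: usize,
    body_id: usize,
    code: &'static str,
}

struct FnCtxt {
    body_id: usize,
}

impl FnCtxt {
    // What call sites now use instead of spelling out
    // ObligationCause::new(span, self.body_id, code) at every site.
    fn cause(&self, span: usize, code: &'static str) -> ObligationCause {
        ObligationCause { span, body_id: self.body_id, code }
    }

    // Shorthand used where the diffs call self.misc(span); modeled here as a fixed "Misc" code.
    fn misc(&self, span: usize) -> ObligationCause {
        self.cause(span, "Misc")
    }
}

fn main() {
    let fcx = FnCtxt { body_id: 7 };
    let long_form = ObligationCause { span: 42, body_id: 7, code: "RustCall" };
    assert_eq!(fcx.cause(42, "RustCall"), long_form);
    assert_eq!(fcx.misc(42).code, "Misc");
}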

@@ -207,7 +207,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             self.register_wf_obligation(
                 fn_input_ty.into(),
                 arg_expr.span,
-                ObligationCauseCode::Misc,
+                ObligationCauseCode::WellFormed(None),
             );
         }

@@ -601,7 +601,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
                     self.call_expr.hir_id,
                     idx,
                 );
-                traits::ObligationCause::new(self.span, self.body_id, code)
+                self.cause(self.span, code)
             },
             self.param_env,
             method_predicates,

@@ -1739,8 +1739,8 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
         &self,
         trait_ref: ty::TraitRef<'tcx>,
     ) -> traits::SelectionResult<'tcx, traits::Selection<'tcx>> {
-        let cause = traits::ObligationCause::misc(self.span, self.body_id);
-        let obligation = traits::Obligation::new(self.tcx, cause, self.param_env, trait_ref);
+        let obligation =
+            traits::Obligation::new(self.tcx, self.misc(self.span), self.param_env, trait_ref);
         traits::SelectionContext::new(self).select(&obligation)
     }
@@ -1841,7 +1841,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
                     self.scope_expr_id,
                     idx,
                 );
-                ObligationCause::new(self.span, self.body_id, code)
+                self.cause(self.span, code)
             },
             self.param_env,
             impl_bounds,

@@ -105,8 +105,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            return false;
        };
        let trait_ref = ty::TraitRef::new(self.tcx, into_iterator_trait, [ty]);
-       let cause = ObligationCause::new(span, self.body_id, ObligationCauseCode::Misc);
-       let obligation = Obligation::new(self.tcx, cause, self.param_env, trait_ref);
+       let obligation = Obligation::new(self.tcx, self.misc(span), self.param_env, trait_ref);
        if !self.predicate_must_hold_modulo_regions(&obligation) {
            return false;
        }
@@ -3489,7 +3488,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            let pred = ty::TraitRef::new(self.tcx, unpin_trait, [*rcvr_ty]);
            let unpin = self.predicate_must_hold_considering_regions(&Obligation::new(
                self.tcx,
-               ObligationCause::misc(rcvr.span, self.body_id),
+               self.misc(rcvr.span),
                self.param_env,
                pred,
            ));

@@ -796,7 +796,7 @@ impl<T: Clone> Clone for Reverse<T> {
 /// }
 ///
 /// impl Ord for Character {
-///     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+///     fn cmp(&self, other: &Self) -> Ordering {
 ///         self.experience
 ///             .cmp(&other.experience)
 ///             .then(self.health.cmp(&other.health))
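
The doc example touched here already brings `Ordering` into scope, which is why the fully qualified return type was redundant. A compilable reconstruction of that example follows; only the `cmp` body comes from the visible context, while the remaining impls and the `main` check are filled in so the snippet stands alone:

use std::cmp::Ordering;

// Hypothetical minimal reconstruction of the surrounding doc example.
#[derive(PartialEq, Eq)]
struct Character {
    health: u32,
    experience: u32,
}

impl Ord for Character {
    fn cmp(&self, other: &Self) -> Ordering {
        self.experience
            .cmp(&other.experience)
            .then(self.health.cmp(&other.health))
    }
}

impl PartialOrd for Character {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let a = Character { health: 10, experience: 5 };
    let b = Character { health: 10, experience: 77 };
    // Lower experience sorts first, then health breaks ties.
    assert_eq!(a.cmp(&b), Ordering::Less);
}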

@@ -18,19 +18,10 @@ use crate::core::builder::{
     self, Alias, Builder, Compiler, Kind, RunConfig, ShouldRun, Step, crate_description,
 };
 use crate::core::config::{Config, TargetSelection};
-use crate::utils::helpers::{symlink_dir, t, up_to_date};
+use crate::helpers::{is_path_in_submodule, symlink_dir, t, up_to_date};

-macro_rules! submodule_helper {
-    ($path:expr, submodule) => {
-        $path
-    };
-    ($path:expr, submodule = $submodule:literal) => {
-        $submodule
-    };
-}
-
 macro_rules! book {
-    ($($name:ident, $path:expr, $book_name:expr, $lang:expr $(, submodule $(= $submodule:literal)? )? ;)+) => {
+    ($($name:ident, $path:expr, $book_name:expr, $lang:expr ;)+) => {
         $(
             #[derive(Debug, Clone, Hash, PartialEq, Eq)]
             pub struct $name {
@@ -53,10 +44,10 @@ macro_rules! book {
             }

             fn run(self, builder: &Builder<'_>) {
-                $(
-                    let path = submodule_helper!( $path, submodule $( = $submodule )? );
-                    builder.require_submodule(path, None);
-                )?
+                if is_path_in_submodule(&builder, $path) {
+                    builder.require_submodule($path, None);
+                }

                 builder.ensure(RustbookSrc {
                     target: self.target,
                     name: $book_name.to_owned(),
@@ -77,12 +68,12 @@ macro_rules! book {
 // FIXME: Make checking for a submodule automatic somehow (maybe by having a list of all submodules
 // and checking against it?).
 book!(
-    CargoBook, "src/tools/cargo/src/doc", "cargo", &[], submodule = "src/tools/cargo";
+    CargoBook, "src/tools/cargo/src/doc", "cargo", &[];
     ClippyBook, "src/tools/clippy/book", "clippy", &[];
-    EditionGuide, "src/doc/edition-guide", "edition-guide", &[], submodule;
-    EmbeddedBook, "src/doc/embedded-book", "embedded-book", &[], submodule;
-    Nomicon, "src/doc/nomicon", "nomicon", &[], submodule;
-    RustByExample, "src/doc/rust-by-example", "rust-by-example", &["ja", "zh"], submodule;
+    EditionGuide, "src/doc/edition-guide", "edition-guide", &[];
+    EmbeddedBook, "src/doc/embedded-book", "embedded-book", &[];
+    Nomicon, "src/doc/nomicon", "nomicon", &[];
+    RustByExample, "src/doc/rust-by-example", "rust-by-example", &["ja", "zh"];
     RustdocBook, "src/doc/rustdoc", "rustdoc", &[];
     StyleGuide, "src/doc/style-guide", "style-guide", &[];
 );
@@ -910,7 +901,6 @@ macro_rules! tool_doc {
         $(rustc_tool = $rustc_tool:literal, )?
         $(is_library = $is_library:expr,)?
         $(crates = $crates:expr)?
-        $(, submodule $(= $submodule:literal)? )?
     ) => {
         #[derive(Debug, Clone, Hash, PartialEq, Eq)]
         pub struct $tool {
@@ -938,14 +928,12 @@ macro_rules! tool_doc {
         /// we do not merge it with the other documentation from std, test and
         /// proc_macros. This is largely just a wrapper around `cargo doc`.
         fn run(self, builder: &Builder<'_>) {
-            let source_type = SourceType::InTree;
-            $(
-                let _ = source_type; // silence the "unused variable" warning
-                let source_type = SourceType::Submodule;
+            let mut source_type = SourceType::InTree;

-                let path = submodule_helper!( $path, submodule $( = $submodule )? );
-                builder.require_submodule(path, None);
-            )?
+            if is_path_in_submodule(&builder, $path) {
+                source_type = SourceType::Submodule;
+                builder.require_submodule($path, None);
+            }

             let stage = builder.top_stage;
             let target = self.target;
@@ -1054,8 +1042,7 @@ tool_doc!(
         "crates-io",
         "mdman",
         "rustfix",
-    ],
-    submodule = "src/tools/cargo"
+    ]
 );

 tool_doc!(Tidy, "src/tools/tidy", rustc_tool = false, crates = ["tidy"]);
 tool_doc!(

@@ -60,6 +60,12 @@ pub fn is_dylib(path: &Path) -> bool {
     })
 }

+/// Returns `true` if the given path is part of a submodule.
+pub fn is_path_in_submodule(builder: &Builder<'_>, path: &str) -> bool {
+    let submodule_paths = build_helper::util::parse_gitmodules(&builder.src);
+    submodule_paths.iter().any(|submodule_path| path.starts_with(submodule_path))
+}
+
 fn is_aix_shared_archive(path: &Path) -> bool {
     let file = match fs::File::open(path) {
         Ok(file) => file,
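
The new helper boils down to a prefix test against the submodule paths listed in `.gitmodules`. A standalone sketch of that check is below; the hardcoded list is an assumption standing in for whatever `build_helper::util::parse_gitmodules` actually returns from the real `.gitmodules` file:

// Standalone sketch of the prefix check performed by is_path_in_submodule.
fn path_is_in_submodule(submodule_paths: &[&str], path: &str) -> bool {
    submodule_paths.iter().any(|submodule_path| path.starts_with(submodule_path))
}

fn main() {
    // Stand-in for the parsed .gitmodules entries.
    let submodules = ["src/tools/cargo", "src/llvm-project", "src/doc/nomicon"];
    assert!(path_is_in_submodule(&submodules, "src/llvm-project"));
    // Subdirectories of a submodule match via the prefix test.
    assert!(path_is_in_submodule(&submodules, "src/tools/cargo/random-subdir"));
    assert!(!path_is_in_submodule(&submodules, "invalid/path"));
}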

@@ -3,8 +3,8 @@ use std::io::Write;
 use std::path::PathBuf;

 use crate::utils::helpers::{
-    check_cfg_arg, extract_beta_rev, hex_encode, make, program_out_of_date, set_file_times,
-    symlink_dir,
+    check_cfg_arg, extract_beta_rev, hex_encode, is_path_in_submodule, make, program_out_of_date,
+    set_file_times, symlink_dir,
 };
 use crate::{Config, Flags};
@@ -115,3 +115,18 @@ fn test_set_file_times_sanity_check() {
     assert_eq!(found_metadata.accessed().unwrap(), unix_epoch);
     assert_eq!(found_metadata.modified().unwrap(), unix_epoch)
 }
+
+#[test]
+fn test_is_path_in_submodule() {
+    let config = Config::parse_inner(Flags::parse(&["build".into(), "--dry-run".into()]), |&_| {
+        Ok(Default::default())
+    });
+    let build = crate::Build::new(config.clone());
+    let builder = crate::core::builder::Builder::new(&build);
+    assert!(!is_path_in_submodule(&builder, "invalid/path"));
+    assert!(is_path_in_submodule(&builder, "src/tools/cargo"));
+    assert!(is_path_in_submodule(&builder, "src/llvm-project"));
+    // Make sure subdirs are handled properly
+    assert!(is_path_in_submodule(&builder, "src/tools/cargo/random-subdir"));
+}

@@ -91,7 +91,7 @@ impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, I> {
                 Some(e) => return Some(e),
                 None => {
                     if !self.footnotes.is_empty() {
-                        // After all the markdown is emmited, emit an <hr> then all the footnotes
+                        // After all the markdown is emitted, emit an <hr> then all the footnotes
                         // in a list.
                         let defs: Vec<_> = self.footnotes.drain(..).map(|(_, x)| x).collect();
                         self.existing_footnotes.fetch_add(defs.len(), Ordering::Relaxed);

@@ -27,7 +27,7 @@ struct Offset {
 }

 impl<F> SortedTemplate<F> {
-    /// Generate this template from arbitary text.
+    /// Generate this template from arbitrary text.
     /// Will insert wherever the substring `delimiter` can be found.
     /// Errors if it does not appear exactly once.
     pub(crate) fn from_template(template: &str, delimiter: &str) -> Result<Self, Error> {

@@ -2848,7 +2848,7 @@ class DocSearch {
      * - Limit checks that Ty matches Vec<Ty>,
      *   but not Vec<ParamEnvAnd<WithInfcx<ConstTy<Interner<Ty=Ty>>>>>
      *
-     * @return {[FunctionType]|null} - Returns highlighed results if a match, null otherwise.
+     * @return {[FunctionType]|null} - Returns highlighted results if a match, null otherwise.
      */
     function unifyFunctionTypes(
         fnTypesIn,
@@ -3148,7 +3148,7 @@ class DocSearch {
      * - Limit checks that Ty matches Vec<Ty>,
      *   but not Vec<ParamEnvAnd<WithInfcx<ConstTy<Interner<Ty=Ty>>>>>
      *
-     * @return {[FunctionType]|null} - Returns highlighed results if a match, null otherwise.
+     * @return {[FunctionType]|null} - Returns highlighted results if a match, null otherwise.
      */
     function unifyGenericTypes(
         fnTypesIn,

@@ -584,7 +584,7 @@ install_components() {
     # HACK: Try to support overriding --docdir. Paths with the form
     # "share/doc/$product/" can be redirected to a single --docdir
     # path. If the following detects that --docdir has been specified
-    # then it will replace everything preceeding the "$product" path
+    # then it will replace everything preceding the "$product" path
     # component. The problem here is that the combined rust installer
     # contains two "products": rust and cargo; so the contents of those
     # directories will both be dumped into the same directory; and the

@@ -214,7 +214,7 @@ const EXCEPTIONS_BOOTSTRAP: ExceptionList = &[
 ];

 const EXCEPTIONS_UEFI_QEMU_TEST: ExceptionList = &[
-    ("r-efi", "MIT OR Apache-2.0 OR LGPL-2.1-or-later"), // LGPL is not acceptible, but we use it under MIT OR Apache-2.0
+    ("r-efi", "MIT OR Apache-2.0 OR LGPL-2.1-or-later"), // LGPL is not acceptable, but we use it under MIT OR Apache-2.0
 ];

 /// Placeholder for non-standard license file.

@@ -1,6 +1,6 @@
 //! Optional checks for file types other than Rust source
 //!
-//! Handles python tool version managment via a virtual environment in
+//! Handles python tool version management via a virtual environment in
 //! `build/venv`.
 //!
 //! # Functional outline

@@ -1,9 +0,0 @@
-//@ known-bug: #132766
-trait Trait {}
-impl<'a> Trait for () {
-    fn pass2<'a>() -> impl Trait2 {}
-}
-trait Trait2 {}
-impl Trait2 for () {}

@@ -0,0 +1,19 @@
+// This uses edition 2024 for new lifetime capture rules.
+//@ edition: 2024
+
+// The problem here is that the presence of the opaque which captures all lifetimes in scope
+// means that the duplicated `'a` (which I'll call the dupe) is considered to be *early-bound*
+// since it shows up in the output but not the inputs. This is paired with the fact that we
+// were previously setting the name of the dupe to `'_` in the generic param definition, which
+// means that the identity args for the function were `['a#0, '_#1]` even though the lifetime
+// for the dupe should've been `'a#1`. This difference in symbol meant that NLL couldn't
+// actually match the lifetime against the identity lifetimes, leading to an ICE.
+
+struct Foo<'a>(&'a ());
+
+impl<'a> Foo<'a> {
+    fn pass<'a>() -> impl Sized {}
+    //~^ ERROR lifetime name `'a` shadows a lifetime name that is already in scope
+}
+
+fn main() {}

@@ -0,0 +1,11 @@
+error[E0496]: lifetime name `'a` shadows a lifetime name that is already in scope
+  --> $DIR/captured-invalid-lifetime.rs:15:13
+   |
+LL | impl<'a> Foo<'a> {
+   |      -- first declared here
+LL |     fn pass<'a>() -> impl Sized {}
+   |             ^^ lifetime `'a` already in scope
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0496`.

@@ -146,13 +146,13 @@ hir-stats Variant                 144 ( 1.6%)   2      72
 hir-stats GenericBound            256 ( 2.9%)   4      64
 hir-stats - Trait                 256 ( 2.9%)   4
 hir-stats Block                   288 ( 3.2%)   6      48
-hir-stats GenericParam            360 ( 4.0%)   5      72
 hir-stats Pat                     360 ( 4.0%)   5      72
 hir-stats - Struct                 72 ( 0.8%)   1
 hir-stats - Wild                   72 ( 0.8%)   1
 hir-stats - Binding               216 ( 2.4%)   3
-hir-stats Generics                560 ( 6.3%)  10      56
-hir-stats Ty                      720 ( 8.1%)  15      48
+hir-stats GenericParam            400 ( 4.5%)   5      80
+hir-stats Generics                560 ( 6.2%)  10      56
+hir-stats Ty                      720 ( 8.0%)  15      48
 hir-stats - Ptr                    48 ( 0.5%)   1
 hir-stats - Ref                    48 ( 0.5%)   1
 hir-stats - Path                  624 ( 7.0%)  13
@@ -171,8 +171,8 @@ hir-stats - Impl                   88 ( 1.0%)   1
 hir-stats - Trait                  88 ( 1.0%)   1
 hir-stats - Fn                    176 ( 2.0%)   2
 hir-stats - Use                   352 ( 3.9%)   4
-hir-stats Path                  1_240 (13.9%)  31      40
-hir-stats PathSegment           1_920 (21.5%)  40      48
+hir-stats Path                  1_240 (13.8%)  31      40
+hir-stats PathSegment           1_920 (21.4%)  40      48
 hir-stats ----------------------------------------------------------------
-hir-stats Total                 8_936                 180
+hir-stats Total                 8_976                 180
 hir-stats