Rollup merge of #110417 - jsoref:spelling-compiler, r=Nilstrieb

Spelling compiler

This is per https://github.com/rust-lang/rust/pull/110392#issuecomment-1510193656

I'm going to delay squashing because I really don't expect people to be perfectly happy with my changes; I really am human and I really do make mistakes.

r? Nilstrieb

I'm going to be flying this evening, but I should be able to squash / respond to reviews within a day or two.

I tried to be careful about dropping changes to `tests`; afaict only two files had changes that were likely related to the changes for a given commit (this is where not having eagerly squashed should have given me an advantage), but, that said, picking things apart can be error-prone.
Guillaume Gomez 2023-04-18 14:50:51 +02:00 committed by GitHub
commit aa87addfb3
101 changed files with 159 additions and 159 deletions


@@ -231,7 +231,7 @@ ast_passes_feature_on_non_nightly = `#![feature]` may not be used on the {$chann
 .suggestion = remove the attribute
 .stable_since = the feature `{$name}` has been stable since `{$since}` and no longer requires an attribute to enable
-ast_passes_incompatbile_features = `{$f1}` and `{$f2}` are incompatible, using them at the same time is not allowed
+ast_passes_incompatible_features = `{$f1}` and `{$f2}` are incompatible, using them at the same time is not allowed
 .help = remove one of these features
 ast_passes_show_span = {$msg}


@@ -677,7 +677,7 @@ impl AddToDiagnostic for StableFeature {
 }
 #[derive(Diagnostic)]
-#[diag(ast_passes_incompatbile_features)]
+#[diag(ast_passes_incompatible_features)]
 #[help]
 pub struct IncompatibleFeatures {
 #[primary_span]


@@ -1147,7 +1147,7 @@ pub fn mut_borrow_of_mutable_ref(local_decl: &LocalDecl<'_>, local_name: Option<
 // suggest removing the `&mut`.
 //
 // Deliberately fall into this case for all implicit self types,
-// so that we don't fall in to the next case with them.
+// so that we don't fall into the next case with them.
 kind == hir::ImplicitSelfKind::MutRef
 }
 _ if Some(kw::SelfLower) == local_name => {
@@ -1235,7 +1235,7 @@ fn suggest_ampmut<'tcx>(
 }
 }
-let (suggestability, highlight_span) = match opt_ty_info {
+let (suggestibility, highlight_span) = match opt_ty_info {
 // if this is a variable binding with an explicit type,
 // try to highlight that for the suggestion.
 Some(ty_span) => (true, ty_span),
@@ -1256,7 +1256,7 @@ fn suggest_ampmut<'tcx>(
 let ty_mut = local_decl.ty.builtin_deref(true).unwrap();
 assert_eq!(ty_mut.mutbl, hir::Mutability::Not);
 (
-suggestability,
+suggestibility,
 highlight_span,
 if local_decl.ty.is_ref() {
 format!("&mut {}", ty_mut.ty)


@@ -22,9 +22,9 @@ pub fn insert_reference_to_gdb_debug_scripts_section_global(bx: &mut Builder<'_,
 bx.const_bitcast(get_or_insert_gdb_debug_scripts_section_global(bx), bx.type_i8p());
 // Load just the first byte as that's all that's necessary to force
 // LLVM to keep around the reference to the global.
-let volative_load_instruction = bx.volatile_load(bx.type_i8(), gdb_debug_scripts_section);
+let volatile_load_instruction = bx.volatile_load(bx.type_i8(), gdb_debug_scripts_section);
 unsafe {
-llvm::LLVMSetAlignment(volative_load_instruction, 1);
+llvm::LLVMSetAlignment(volatile_load_instruction, 1);
 }
 }
 }


@@ -62,7 +62,7 @@ const SINGLE_VARIANT_VIRTUAL_DISR: u64 = 0;
 /// In CPP-like mode, we generate a union with a field for each variant and an
 /// explicit tag field. The field of each variant has a struct type
-/// that encodes the discrimiant of the variant and it's data layout.
+/// that encodes the discriminant of the variant and it's data layout.
 /// The union also has a nested enumeration type that is only used for encoding
 /// variant names in an efficient way. Its enumerator values do _not_ correspond
 /// to the enum's discriminant values.


@@ -69,7 +69,7 @@ mod declare;
 mod errors;
 mod intrinsic;
-// The following is a work around that replaces `pub mod llvm;` and that fixes issue 53912.
+// The following is a workaround that replaces `pub mod llvm;` and that fixes issue 53912.
 #[path = "llvm/mod.rs"]
 mod llvm_;
 pub mod llvm {


@@ -148,7 +148,7 @@ codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` fa
 codegen_ssa_unable_to_run_dsymutil = unable to run `dsymutil`: {$error}
-codegen_ssa_stripping_debu_info_failed = stripping debug info with `{$util}` failed: {$status}
+codegen_ssa_stripping_debug_info_failed = stripping debug info with `{$util}` failed: {$status}
 .note = {$output}
 codegen_ssa_unable_to_run = unable to run `{$util}`: {$error}


@@ -872,7 +872,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
 let load_from_incr_comp_dir = |output_path: PathBuf, saved_path: &str| {
 let source_file = in_incr_comp_dir(&incr_comp_session_dir, saved_path);
 debug!(
-"copying pre-existing module `{}` from {:?} to {}",
+"copying preexisting module `{}` from {:?} to {}",
 module.name,
 source_file,
 output_path.display()


@@ -156,7 +156,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
 None => {
 // Unfortunately, unconditionally using `llvm.used` causes
 // issues in handling `.init_array` with the gold linker,
-// but using `llvm.compiler.used` caused a nontrival amount
+// but using `llvm.compiler.used` caused a nontrivial amount
 // of unintentional ecosystem breakage -- particularly on
 // Mach-O targets.
 //


@@ -424,7 +424,7 @@ pub struct UnableToRunDsymutil {
 }
 #[derive(Diagnostic)]
-#[diag(codegen_ssa_stripping_debu_info_failed)]
+#[diag(codegen_ssa_stripping_debug_info_failed)]
 #[note]
 pub struct StrippingDebugInfoFailed<'a> {
 pub util: &'a str,


@@ -784,7 +784,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
 Abi::Scalar(scalar_layout) => {
 if !scalar_layout.is_uninit_valid() {
 // There is something to check here.
-let scalar = self.read_scalar(op, "initiailized scalar value")?;
+let scalar = self.read_scalar(op, "initialized scalar value")?;
 self.visit_scalar(scalar, scalar_layout)?;
 }
 }
@@ -794,7 +794,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
 // the other must be init.
 if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
 let (a, b) =
-self.read_immediate(op, "initiailized scalar value")?.to_scalar_pair();
+self.read_immediate(op, "initialized scalar value")?.to_scalar_pair();
 self.visit_scalar(a, a_layout)?;
 self.visit_scalar(b, b_layout)?;
 }


@@ -262,7 +262,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 // We sometimes have to use `defining_opaque_types` for subtyping
 // to succeed here and figuring out how exactly that should work
 // is annoying. It is harmless enough to just not validate anything
-// in that case. We still check this after analysis as all opque
+// in that case. We still check this after analysis as all opaque
 // types have been revealed at this point.
 if (src, dest).has_opaque_types() {
 return true;


@@ -557,7 +557,7 @@ impl SelfProfiler {
 let crate_name = crate_name.unwrap_or("unknown-crate");
 // HACK(eddyb) we need to pad the PID, strange as it may seem, as its
 // length can behave as a source of entropy for heap addresses, when
-// ASLR is disabled and the heap is otherwise determinic.
+// ASLR is disabled and the heap is otherwise deterministic.
 let pid: u32 = process::id();
 let filename = format!("{crate_name}-{pid:07}.rustc_profile");
 let path = output_directory.join(&filename);


@@ -1,4 +1,4 @@
-A struct pattern attempted to extract a non-existent field from a struct.
+A struct pattern attempted to extract a nonexistent field from a struct.
 Erroneous code example:


@@ -32,7 +32,7 @@ error: [-, o]
 This error is deliberately triggered with the `#[rustc_variance]` attribute
 (`#![feature(rustc_attrs)]` must be enabled) and helps to show you the variance
 of the type's generic parameters. You can read more about variance and
-subtyping in [this section of the Rustnomicon]. For a more in depth look at
+subtyping in [this section of the Rustonomicon]. For a more in depth look at
 variance (including a more complete list of common variances) see
 [this section of the Reference]. For information on how variance is implemented
 in the compiler, see [this section of `rustc-dev-guide`].
@@ -41,6 +41,6 @@ This error can be easily fixed by removing the `#[rustc_variance]` attribute,
 the compiler's suggestion to comment it out can be applied automatically with
 `rustfix`.
-[this section of the Rustnomicon]: https://doc.rust-lang.org/nomicon/subtyping.html
+[this section of the Rustonomicon]: https://doc.rust-lang.org/nomicon/subtyping.html
 [this section of the Reference]: https://doc.rust-lang.org/reference/subtyping.html#variance
 [this section of `rustc-dev-guide`]: https://rustc-dev-guide.rust-lang.org/variance.html


@@ -29,7 +29,7 @@ If `no_restriction()` were to use `&T` instead of `&()` as an argument, the
 compiler would have added an implied bound, causing this to compile.
 This error can be resolved by explicitly naming the elided lifetime for `x` and
-then explicily requiring that the generic parameter `T` outlives that lifetime:
+then explicitly requiring that the generic parameter `T` outlives that lifetime:
 ```
 fn no_restriction<'a, T: 'a>(x: &'a ()) -> &'a () {


@@ -1,6 +1,6 @@
 Plugin `..` only found in rlib format, but must be available in dylib format.
-Erroronous code example:
+Erroneous code example:
 `rlib-plugin.rs`
 ```ignore (needs-linkage-with-other-tests)


@@ -10,7 +10,7 @@ trait Hello {
 }
 ```
-In this example, we tried to use the non-existent associated type `You` of the
+In this example, we tried to use the nonexistent associated type `You` of the
 `Hello` trait. To fix this error, use an existing associated type:
 ```


@@ -1,4 +1,4 @@
-Attempted to access a non-existent field in a struct.
+Attempted to access a nonexistent field in a struct.
 Erroneous code example:


@@ -1980,7 +1980,7 @@ impl EmitterWriter {
 }
 if let DisplaySuggestion::Add = show_code_change && is_item_attribute {
 // The suggestion adds an entire line of code, ending on a newline, so we'll also
-// print the *following* line, to provide context of what we're advicing people to
+// print the *following* line, to provide context of what we're advising people to
 // do. Otherwise you would only see contextless code that can be confused for
 // already existing code, despite the colors and UI elements.
 // We special case `#[derive(_)]\n` and other attribute suggestions, because those


@@ -341,7 +341,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
 Success(named_matches) => {
 debug!("Parsed arm successfully");
 // The matcher was `Success(..)`ful.
-// Merge the gated spans from parsing the matcher with the pre-existing ones.
+// Merge the gated spans from parsing the matcher with the preexisting ones.
 sess.gated_spans.merge(gated_spans_snapshot);
 return Ok((i, named_matches));
@@ -873,7 +873,7 @@ impl<'tt> FirstSets<'tt> {
 }
 }
-// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined
+// Most `mbe::TokenTree`s are preexisting in the matcher, but some are defined
 // implicitly, such as opening/closing delimiters and sequence repetition ops.
 // This type encapsulates both kinds. It implements `Clone` while avoiding the
 // need for `mbe::TokenTree` to implement `Clone`.


@@ -513,7 +513,7 @@ error: foo
 }
 #[test]
-fn non_overlaping() {
+fn non_overlapping() {
 test_harness(
 r#"
 fn foo() {
@@ -552,7 +552,7 @@ error: foo
 }
 #[test]
-fn overlaping_start_and_end() {
+fn overlapping_start_and_end() {
 test_harness(
 r#"
 fn foo() {


@@ -139,7 +139,7 @@ declare_features! (
 /// Allows using `#[on_unimplemented(..)]` on traits.
 /// (Moved to `rustc_attrs`.)
 (removed, on_unimplemented, "1.40.0", None, None, None),
-/// A way to temporarily opt out of opt in copy. This will *never* be accepted.
+/// A way to temporarily opt out of opt-in copy. This will *never* be accepted.
 (removed, opt_out_copy, "1.0.0", None, None, None),
 /// Allows features specific to OIBIT (now called auto traits).
 /// Renamed to `auto_traits`.


@@ -2061,7 +2061,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
 err.note("enum variants can't have type parameters");
 let type_name = tcx.item_name(adt_def.did());
 let msg = format!(
-"you might have meant to specity type parameters on enum \
+"you might have meant to specify type parameters on enum \
 `{type_name}`"
 );
 let Some(args) = assoc_segment.args else { return; };


@@ -82,7 +82,7 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
 let cause = traits::ObligationCause::misc(span, impl_did);
 match type_allowed_to_implement_copy(tcx, param_env, self_type, cause) {
 Ok(()) => {}
-Err(CopyImplementationError::InfrigingFields(fields)) => {
+Err(CopyImplementationError::InfringingFields(fields)) => {
 let mut err = struct_span_err!(
 tcx.sess,
 span,


@@ -1333,7 +1333,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
 // We may fail to resolve higher-ranked lifetimes that are mentioned by APIT.
 // AST-based resolution does not care for impl-trait desugaring, which are the
-// responibility of lowering. This may create a mismatch between the resolution
+// responsibility of lowering. This may create a mismatch between the resolution
 // AST found (`region_def_id`) which points to HRTB, and what HIR allows.
 // ```
 // fn foo(x: impl for<'a> Trait<'a, Assoc = impl Copy + 'a>) {}


@@ -976,7 +976,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// Attempt to coerce an expression to a type, and return the
 /// adjusted type of the expression, if successful.
 /// Adjustments are only recorded if the coercion succeeded.
-/// The expressions *must not* have any pre-existing adjustments.
+/// The expressions *must not* have any preexisting adjustments.
 pub fn try_coerce(
 &self,
 expr: &hir::Expr<'_>,
@@ -1340,7 +1340,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
 }
 /// As an optimization, you can create a `CoerceMany` with a
-/// pre-existing slice of expressions. In this case, you are
+/// preexisting slice of expressions. In this case, you are
 /// expected to pass each element in the slice to `coerce(...)` in
 /// order. This is used with arrays in particular to avoid
 /// needlessly cloning the slice.


@@ -108,7 +108,7 @@ pub enum ExpectedReturnTypeLabel<'tcx> {
 #[derive(Diagnostic)]
 #[diag(hir_typeck_missing_parentheses_in_range, code = "E0689")]
-pub struct MissingParentheseInRange {
+pub struct MissingParenthesesInRange {
 #[primary_span]
 #[label(hir_typeck_missing_parentheses_in_range)]
 pub span: Span,


@@ -827,7 +827,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 }
 QPath::TypeRelative(ref qself, ref segment) => {
 // Don't use `self.to_ty`, since this will register a WF obligation.
-// If we're trying to call a non-existent method on a trait
+// If we're trying to call a nonexistent method on a trait
 // (e.g. `MyTrait::missing_method`), then resolution will
 // give us a `QPath::TypeRelative` with a trait object as
 // `qself`. In that case, we want to avoid registering a WF obligation


@@ -330,7 +330,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// expression mentioned.
 ///
 /// `blame_specific_arg_if_possible` will find the most-specific expression anywhere inside
-/// the provided function call expression, and mark it as responsible for the fullfillment
+/// the provided function call expression, and mark it as responsible for the fulfillment
 /// error.
 fn blame_specific_arg_if_possible(
 &self,


@@ -794,7 +794,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 return;
 };
-// get all where BoundPredicates here, because they are used in to cases below
+// get all where BoundPredicates here, because they are used in two cases below
 let where_predicates = predicates
 .iter()
 .filter_map(|p| match p {


@@ -650,7 +650,7 @@ fn check_must_not_suspend_ty<'tcx>(
 },
 )
 }
-// If drop tracking is enabled, we want to look through references, since the referrent
+// If drop tracking is enabled, we want to look through references, since the referent
 // may not be considered live across the await point.
 ty::Ref(_region, ty, _mutability) if fcx.sess().opts.unstable_opts.drop_tracking => {
 let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);


@@ -1530,7 +1530,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 );
 if pick.is_ok() {
 let range_span = parent_expr.span.with_hi(expr.span.hi());
-tcx.sess.emit_err(errors::MissingParentheseInRange {
+tcx.sess.emit_err(errors::MissingParenthesesInRange {
 span,
 ty_str: ty_str.to_string(),
 method_name: item_name.as_str().to_string(),


@@ -1659,7 +1659,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 if tcx.sess.teach(&err.get_code().unwrap()) {
 err.note(
 "This error indicates that a struct pattern attempted to \
-extract a non-existent field from a struct. Struct fields \
+extract a nonexistent field from a struct. Struct fields \
 are identified by the name used before the colon : so struct \
 patterns should resemble the declaration of the struct type \
 being matched.\n\n\


@@ -223,7 +223,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(closure_def_id);
 if should_do_rust_2021_incompatible_closure_captures_analysis(self.tcx, closure_hir_id) {
-self.perform_2229_migration_anaysis(closure_def_id, body_id, capture_clause, span);
+self.perform_2229_migration_analysis(closure_def_id, body_id, capture_clause, span);
 }
 let after_feature_tys = self.final_upvar_tys(closure_def_id);
@@ -731,7 +731,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// Perform the migration analysis for RFC 2229, and emit lint
 /// `disjoint_capture_drop_reorder` if needed.
-fn perform_2229_migration_anaysis(
+fn perform_2229_migration_analysis(
 &self,
 closure_def_id: LocalDefId,
 body_id: hir::BodyId,


@@ -139,7 +139,7 @@ pub fn check_dirty_clean_annotations(tcx: TyCtxt<'_>) {
 return;
 }
-// can't add `#[rustc_clean]` etc without opting in to this feature
+// can't add `#[rustc_clean]` etc without opting into this feature
 if !tcx.features().rustc_attrs {
 return;
 }


@@ -80,7 +80,7 @@ infer_subtype = ...so that the {$requirement ->
 [no_else] `if` missing an `else` returns `()`
 [fn_main_correct_type] `main` function has the correct type
 [fn_start_correct_type] `#[start]` function has the correct type
-[intristic_correct_type] intrinsic has the correct type
+[intrinsic_correct_type] intrinsic has the correct type
 [method_correct_type] method receiver has the correct type
 *[other] types are compatible
 }
@@ -93,7 +93,7 @@ infer_subtype_2 = ...so that {$requirement ->
 [no_else] `if` missing an `else` returns `()`
 [fn_main_correct_type] `main` function has the correct type
 [fn_start_correct_type] `#[start]` function has the correct type
-[intristic_correct_type] intrinsic has the correct type
+[intrinsic_correct_type] intrinsic has the correct type
 [method_correct_type] method receiver has the correct type
 *[other] types are compatible
 }
@@ -341,8 +341,8 @@ infer_await_note = calling an async function returns a future
 infer_prlf_defined_with_sub = the lifetime `{$sub_symbol}` defined here...
 infer_prlf_defined_without_sub = the lifetime defined here...
-infer_prlf_must_oultive_with_sup = ...must outlive the lifetime `{$sup_symbol}` defined here
-infer_prlf_must_oultive_without_sup = ...must outlive the lifetime defined here
+infer_prlf_must_outlive_with_sup = ...must outlive the lifetime `{$sup_symbol}` defined here
+infer_prlf_must_outlive_without_sup = ...must outlive the lifetime defined here
 infer_prlf_known_limitation = this is a known limitation that will be removed in the future (see issue #100013 <https://github.com/rust-lang/rust/issues/100013> for more information)
 infer_opaque_captures_lifetime = hidden type for `{$opaque_ty}` captures lifetime that does not appear in bounds
@@ -380,7 +380,7 @@ infer_oc_no_else = `if` may be missing an `else` clause
 infer_oc_no_diverge = `else` clause of `let...else` does not diverge
 infer_oc_fn_main_correct_type = `main` function has wrong type
 infer_oc_fn_start_correct_type = `#[start]` function has wrong type
-infer_oc_intristic_correct_type = intrinsic has wrong type
+infer_oc_intrinsic_correct_type = intrinsic has wrong type
 infer_oc_method_correct_type = mismatched `self` parameter type
 infer_oc_closure_selfref = closure/generator type that references itself
 infer_oc_cant_coerce = cannot coerce intrinsics to function pointers


@@ -71,7 +71,7 @@ pub struct AmbiguousImpl<'a> {
 // Copy of `AnnotationRequired` for E0284
 #[derive(Diagnostic)]
 #[diag(infer_type_annotations_needed, code = "E0284")]
-pub struct AmbigousReturn<'a> {
+pub struct AmbiguousReturn<'a> {
 #[primary_span]
 pub span: Span,
 pub source_kind: &'static str,
@@ -1085,7 +1085,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
 span: Span,
 #[note(infer_prlf_defined_with_sub)]
 sub_span: Span,
-#[note(infer_prlf_must_oultive_with_sup)]
+#[note(infer_prlf_must_outlive_with_sup)]
 sup_span: Span,
 sub_symbol: Symbol,
 sup_symbol: Symbol,
@@ -1098,7 +1098,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
 span: Span,
 #[note(infer_prlf_defined_with_sub)]
 sub_span: Span,
-#[note(infer_prlf_must_oultive_without_sup)]
+#[note(infer_prlf_must_outlive_without_sup)]
 sup_span: Span,
 sub_symbol: Symbol,
 #[note(infer_prlf_known_limitation)]
@@ -1110,7 +1110,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
 span: Span,
 #[note(infer_prlf_defined_without_sub)]
 sub_span: Span,
-#[note(infer_prlf_must_oultive_with_sup)]
+#[note(infer_prlf_must_outlive_with_sup)]
 sup_span: Span,
 sup_symbol: Symbol,
 #[note(infer_prlf_known_limitation)]
@@ -1122,7 +1122,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
 span: Span,
 #[note(infer_prlf_defined_without_sub)]
 sub_span: Span,
-#[note(infer_prlf_must_oultive_without_sup)]
+#[note(infer_prlf_must_outlive_without_sup)]
 sup_span: Span,
 #[note(infer_prlf_known_limitation)]
 note: (),
@@ -1488,8 +1488,8 @@ pub enum ObligationCauseFailureCode {
 #[subdiagnostic]
 subdiags: Vec<TypeErrorAdditionalDiags>,
 },
-#[diag(infer_oc_intristic_correct_type, code = "E0308")]
-IntristicCorrectType {
+#[diag(infer_oc_intrinsic_correct_type, code = "E0308")]
+IntrinsicCorrectType {
 #[primary_span]
 span: Span,
 #[subdiagnostic]


@@ -467,11 +467,11 @@ impl<'tcx> InferCtxt<'tcx> {
 }
 }
 GenericArgKind::Const(result_value) => {
-if let ty::ConstKind::Bound(debrujin, b) = result_value.kind() {
+if let ty::ConstKind::Bound(debruijn, b) = result_value.kind() {
 // ...in which case we would set `canonical_vars[0]` to `Some(const X)`.
 // We only allow a `ty::INNERMOST` index in substitutions.
-assert_eq!(debrujin, ty::INNERMOST);
+assert_eq!(debruijn, ty::INNERMOST);
 opt_values[b] = Some(*original_value);
 }
 }


@@ -832,7 +832,7 @@ pub trait ObligationEmittingRelation<'tcx>: TypeRelation<'tcx> {
 /// Register predicates that must hold in order for this relation to hold. Uses
 /// a default obligation cause, [`ObligationEmittingRelation::register_obligations`] should
-/// be used if control over the obligaton causes is required.
+/// be used if control over the obligation causes is required.
 fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ToPredicate<'tcx>>);
 /// Register an obligation that both constants must be equal to each other.


@@ -178,7 +178,7 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> {
 where
 T: Relate<'tcx>,
 {
-// A binder is equal to itself if it's structually equal to itself
+// A binder is equal to itself if it's structurally equal to itself
 if a == b {
 return Ok(a);
 }


@@ -2886,7 +2886,7 @@ impl<'tcx> ObligationCauseExt<'tcx> for ObligationCause<'tcx> {
 LetElse => ObligationCauseFailureCode::NoDiverge { span, subdiags },
 MainFunctionType => ObligationCauseFailureCode::FnMainCorrectType { span },
 StartFunctionType => ObligationCauseFailureCode::FnStartCorrectType { span, subdiags },
-IntrinsicType => ObligationCauseFailureCode::IntristicCorrectType { span, subdiags },
+IntrinsicType => ObligationCauseFailureCode::IntrinsicCorrectType { span, subdiags },
 MethodReceiver => ObligationCauseFailureCode::MethodCorrectType { span, subdiags },
 // In the case where we have no more specific thing to
@@ -2943,7 +2943,7 @@ impl IntoDiagnosticArg for ObligationCauseAsDiagArg<'_> {
 IfExpressionWithNoElse => "no_else",
 MainFunctionType => "fn_main_correct_type",
 StartFunctionType => "fn_start_correct_type",
-IntrinsicType => "intristic_correct_type",
+IntrinsicType => "intrinsic_correct_type",
 MethodReceiver => "method_correct_type",
 _ => "other",
 }


@@ -1,5 +1,5 @@
 use crate::errors::{
-AmbigousReturn, AmbiguousImpl, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
+AmbiguousImpl, AmbiguousReturn, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
 SourceKindMultiSuggestion, SourceKindSubdiag,
 };
 use crate::infer::error_reporting::TypeErrCtxt;
@@ -368,7 +368,7 @@ impl<'tcx> InferCtxt<'tcx> {
 bad_label,
 }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-TypeAnnotationNeeded::E0284 => AmbigousReturn {
+TypeAnnotationNeeded::E0284 => AmbiguousReturn {
 span,
 source_kind,
 source_name,
@@ -573,7 +573,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 bad_label: None,
 }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-TypeAnnotationNeeded::E0284 => AmbigousReturn {
+TypeAnnotationNeeded::E0284 => AmbiguousReturn {
 span,
 source_kind,
 source_name: &name,


@@ -42,7 +42,7 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
 // Next, we instantiate each bound region in the subtype
 // with a fresh region variable. These region variables --
-// but no other pre-existing region variables -- can name
+// but no other preexisting region variables -- can name
 // the placeholders.
 let sub_prime = self.infcx.instantiate_binder_with_fresh_vars(span, HigherRankedType, sub);


@@ -210,7 +210,7 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> {
 where
 T: Relate<'tcx>,
 {
-// A binder is always a subtype of itself if it's structually equal to itself
+// A binder is always a subtype of itself if it's structurally equal to itself
 if a == b {
 return Ok(a);
 }


@@ -103,7 +103,7 @@ pub enum ProjectionCacheEntry<'tcx> {
 /// if this field is set. Evaluation only
 /// cares about the final result, so we don't
 /// care about any region constraint side-effects
-/// produced by evaluating the sub-boligations.
+/// produced by evaluating the sub-obligations.
 ///
 /// Additionally, we will clear out the sub-obligations
 /// entirely if we ever evaluate the cache entry (along


@@ -22,7 +22,7 @@
 use crate::fluent_generated as fluent;
 use crate::{
-errors::BuiltinEllpisisInclusiveRangePatterns,
+errors::BuiltinEllipsisInclusiveRangePatterns,
 lints::{
 BuiltinAnonymousParams, BuiltinBoxPointers, BuiltinClashingExtern,
 BuiltinClashingExternSub, BuiltinConstNoMangle, BuiltinDeprecatedAttrLink,
@@ -1711,13 +1711,13 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns {
 }
 }
-let (parenthesise, endpoints) = match &pat.kind {
+let (parentheses, endpoints) = match &pat.kind {
 PatKind::Ref(subpat, _) => (true, matches_ellipsis_pat(&subpat)),
 _ => (false, matches_ellipsis_pat(pat)),
 };
 if let Some((start, end, join)) = endpoints {
-if parenthesise {
+if parentheses {
 self.node_id = Some(pat.id);
 let end = expr_to_string(&end);
 let replace = match start {
@@ -1725,7 +1725,7 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns {
 None => format!("&(..={})", end),
 };
 if join.edition() >= Edition::Edition2021 {
-cx.sess().emit_err(BuiltinEllpisisInclusiveRangePatterns {
+cx.sess().emit_err(BuiltinEllipsisInclusiveRangePatterns {
 span: pat.span,
 suggestion: pat.span,
 replace,
@@ -1743,7 +1743,7 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns {
 } else {
 let replace = "..=";
 if join.edition() >= Edition::Edition2021 {
-cx.sess().emit_err(BuiltinEllpisisInclusiveRangePatterns {
+cx.sess().emit_err(BuiltinEllipsisInclusiveRangePatterns {
 span: pat.span,
 suggestion: join,
 replace: replace.to_string(),
@@ -2560,7 +2560,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
 .subst(cx.tcx, substs)
 .apply_any_module(cx.tcx, cx.param_env)
 {
-// Entirely skip uninhbaited variants.
+// Entirely skip uninhabited variants.
 Some(false) => return None,
 // Forward the others, but remember which ones are definitely inhabited.
 Some(true) => true,


@@ -81,7 +81,7 @@ pub struct UnknownToolInScopedLint {
 #[derive(Diagnostic)]
 #[diag(lint_builtin_ellipsis_inclusive_range_patterns, code = "E0783")]
-pub struct BuiltinEllpisisInclusiveRangePatterns {
+pub struct BuiltinEllipsisInclusiveRangePatterns {
 #[primary_span]
 pub span: Span,
 #[suggestion(style = "short", code = "{replace}", applicability = "machine-applicable")]


@@ -1021,7 +1021,7 @@ declare_lint! {
 declare_lint! {
 /// The `invalid_alignment` lint detects dereferences of misaligned pointers during
-/// constant evluation.
+/// constant evaluation.
 ///
 /// ### Example
 ///
@@ -1854,7 +1854,7 @@ declare_lint! {
 /// When new methods are added to traits in the standard library, they are
 /// usually added in an "unstable" form which is only available on the
 /// [nightly channel] with a [`feature` attribute]. If there is any
-/// pre-existing code which extends a trait to have a method with the same
+/// preexisting code which extends a trait to have a method with the same
 /// name, then the names will collide. In the future, when the method is
 /// stabilized, this will cause an error due to the ambiguity. This lint
 /// is an early-warning to let you know that there may be a collision in


@@ -811,7 +811,7 @@ LLVMRustOptimize(
 ModulePassManager MPM;
 bool NeedThinLTOBufferPasses = UseThinLTOBuffers;
 if (!NoPrepopulatePasses) {
-// The pre-link pipelines don't support O0 and require using budilO0DefaultPipeline() instead.
+// The pre-link pipelines don't support O0 and require using buildO0DefaultPipeline() instead.
 // At the same time, the LTO pipelines do support O0 and using them is required.
 bool IsLTO = OptStage == LLVMRustOptStage::ThinLTO || OptStage == LLVMRustOptStage::FatLTO;
 if (OptLevel == OptimizationLevel::O0 && !IsLTO) {


@@ -117,7 +117,7 @@ pub(crate) struct CrateMetadata {
 /// Additional data used for decoding `HygieneData` (e.g. `SyntaxContext`
 /// and `ExpnId`).
-/// Note that we store a `HygieneDecodeContext` for each `CrateMetadat`. This is
+/// Note that we store a `HygieneDecodeContext` for each `CrateMetadata`. This is
 /// because `SyntaxContext` ids are not globally unique, so we need
 /// to track which ids we've decoded on a per-crate basis.
 hygiene_context: HygieneDecodeContext,
@@ -627,7 +627,7 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Symbol {
 let pos = d.read_usize();
 let old_pos = d.opaque.position();
-// move to str ofset and read
+// move to str offset and read
 d.opaque.set_position(pos);
 let s = d.read_str();
 let sym = Symbol::intern(s);


@@ -102,7 +102,7 @@ impl<T: HasDataLayout> PointerArithmetic for T {}
 /// This trait abstracts over the kind of provenance that is associated with a `Pointer`. It is
 /// mostly opaque; the `Machine` trait extends it with some more operations that also have access to
 /// some global state.
-/// The `Debug` rendering is used to distplay bare provenance, and for the default impl of `fmt`.
+/// The `Debug` rendering is used to display bare provenance, and for the default impl of `fmt`.
 pub trait Provenance: Copy + fmt::Debug {
 /// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
 /// - If `false`, the offset *must* be relative. This means the bytes representing a pointer are


@@ -251,7 +251,7 @@ pub enum StatementKind<'tcx> {
 /// **Needs clarification**: The implication of the above idea would be that assignment implies
 /// that the resulting value is initialized. I believe we could commit to this separately from
 /// committing to whatever part of the memory model we would need to decide on to make the above
-/// paragragh precise. Do we want to?
+/// paragraph precise. Do we want to?
 ///
 /// Assignments in which the types of the place and rvalue differ are not well-formed.
 ///
@@ -997,7 +997,7 @@ pub type PlaceElem<'tcx> = ProjectionElem<Local, Ty<'tcx>>;
 /// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
 /// something we can even decide without knowing more about Rust's memory model?
 ///
-/// **Needs clarifiation:** Is loading a place that has its variant index set well-formed? Miri
+/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
 /// currently implements it, but it seems like this may be something to check against in the
 /// validator.
 #[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]


@@ -1,4 +1,4 @@
-//! A subset of a mir body used for const evaluatability checking.
+//! A subset of a mir body used for const evaluability checking.
 use crate::ty::{
 self, Const, EarlyBinder, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable,
 TypeVisitableExt,


@@ -68,7 +68,7 @@ pub enum TreatParams {
 }
 /// During fast-rejection, we have the choice of treating projection types
-/// as either simplifyable or not, depending on whether we expect the projection
+/// as either simplifiable or not, depending on whether we expect the projection
 /// to be normalized/rigid.
 #[derive(PartialEq, Eq, Debug, Clone, Copy)]
 pub enum TreatProjections {


@@ -235,7 +235,7 @@ impl IntoDiagnostic<'_, !> for LayoutError<'_> {
 }
 }
-// FIXME: Once the other errors that embed this error have been converted to translateable
+// FIXME: Once the other errors that embed this error have been converted to translatable
 // diagnostics, this Display impl should be removed.
 impl<'tcx> fmt::Display for LayoutError<'tcx> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -458,10 +458,10 @@ impl<'tcx> SizeSkeleton<'tcx> {
 }
 }
-/// When creating the layout for types with abstract conts in their size (i.e. [usize; 4 * N]),
+/// When creating the layout for types with abstract consts in their size (i.e. [usize; 4 * N]),
 /// to ensure that they have a canonical order and can be compared directly we combine all
 /// constants, and sort the other terms. This allows comparison of expressions of sizes,
-/// allowing for things like transmutating between types that depend on generic consts.
+/// allowing for things like transmuting between types that depend on generic consts.
 /// This returns `None` if multiplication of constants overflows.
 fn mul_sorted_consts<'tcx>(
 tcx: TyCtxt<'tcx>,


@@ -197,7 +197,7 @@ impl<'tcx> fmt::Debug for AliasTy<'tcx> {
 // Atomic structs
 //
 // For things that don't carry any arena-allocated data (and are
-// copy...), just add them to one of these lists as appropriat.
+// copy...), just add them to one of these lists as appropriate.
 // For things for which the type library provides traversal implementations
 // for all Interners, we only need to provide a Lift implementation:


@@ -642,7 +642,7 @@ impl<'tcx> TyCtxt<'tcx> {
 }
 }
-/// Return the set of types that should be taken into accound when checking
+/// Return the set of types that should be taken into account when checking
 /// trait bounds on a generator's internal state.
 pub fn generator_hidden_types(
 self,
@@ -1402,7 +1402,7 @@ pub fn is_trivially_const_drop(ty: Ty<'_>) -> bool {
 }
 /// Does the equivalent of
-/// ```ignore (ilustrative)
+/// ```ignore (illustrative)
 /// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
 /// folder.tcx().intern_*(&v)
 /// ```


@@ -163,13 +163,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 //
 // [block: If(lhs)] -true-> [else_block: dest = (rhs)]
 // | (false)
-// [shortcurcuit_block: dest = false]
+// [shortcircuit_block: dest = false]
 //
 // Or:
 //
 // [block: If(lhs)] -false-> [else_block: dest = (rhs)]
 // | (true)
-// [shortcurcuit_block: dest = true]
+// [shortcircuit_block: dest = true]
 let (shortcircuit_block, mut else_block, join_block) = (
 this.cfg.start_new_block(),


@@ -77,7 +77,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 | PatKind::Wild
 | PatKind::Binding { .. }
 | PatKind::Leaf { .. }
-| PatKind::Deref { .. } => self.error_simplifyable(match_pair),
+| PatKind::Deref { .. } => self.error_simplifiable(match_pair),
 }
 }
@@ -173,7 +173,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 debug_assert_ne!(
 target_blocks[idx.index()],
 otherwise_block,
-"no canididates for tested discriminant: {:?}",
+"no candidates for tested discriminant: {:?}",
 discr,
 );
 Some((discr.val, target_blocks[idx.index()]))
@@ -181,7 +181,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 debug_assert_eq!(
 target_blocks[idx.index()],
 otherwise_block,
-"found canididates for untested discriminant: {:?}",
+"found candidates for untested discriminant: {:?}",
 discr,
 );
 None
@@ -499,7 +499,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 /// However, in some cases, the test may just not be relevant to candidate.
 /// For example, suppose we are testing whether `foo.x == 22`, but in one
 /// match arm we have `Foo { x: _, ... }`... in that case, the test for
-/// what value `x` has has no particular relevance to this candidate. In
+/// the value of `x` has no particular relevance to this candidate. In
 /// such cases, this function just returns None without doing anything.
 /// This is used by the overall `match_candidates` algorithm to structure
 /// the match as a whole. See `match_candidates` for more details.
@@ -763,8 +763,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 candidate.match_pairs.extend(consequent_match_pairs);
 }
-fn error_simplifyable<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> ! {
-span_bug!(match_pair.pattern.span, "simplifyable pattern found: {:?}", match_pair.pattern)
+fn error_simplifiable<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> ! {
+span_bug!(match_pair.pattern.span, "simplifiable pattern found: {:?}", match_pair.pattern)
 }
 fn const_range_contains(


@@ -156,7 +156,7 @@ impl<'tcx> ConstToPat<'tcx> {
 if let Some(non_sm_ty) = structural {
 if !self.type_may_have_partial_eq_impl(cv.ty()) {
-// fatal avoids ICE from resolution of non-existent method (rare case).
+// fatal avoids ICE from resolution of nonexistent method (rare case).
 self.tcx()
 .sess
 .emit_fatal(TypeNotStructural { span: self.span, non_sm_ty: non_sm_ty });

View file

@ -366,7 +366,7 @@ where
rustc_index::newtype_index!( rustc_index::newtype_index!(
/// This index uniquely identifies a place. /// This index uniquely identifies a place.
/// ///
/// Not every place has a `PlaceIndex`, and not every `PlaceIndex` correspondends to a tracked /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
/// place. However, every tracked place and all places along its projection have a `PlaceIndex`. /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
pub struct PlaceIndex {} pub struct PlaceIndex {}
); );

View file

@ -10,7 +10,7 @@ use rustc_middle::mir::patch::MirPatch;
/// they are dropped from an aligned address. /// they are dropped from an aligned address.
/// ///
/// For example, if we have something like /// For example, if we have something like
/// ```ignore (ilustrative) /// ```ignore (illustrative)
/// #[repr(packed)] /// #[repr(packed)]
/// struct Foo { /// struct Foo {
/// dealign: u8, /// dealign: u8,
@ -25,7 +25,7 @@ use rustc_middle::mir::patch::MirPatch;
/// its address is not aligned. /// its address is not aligned.
/// ///
/// Instead, we move `foo.data` to a local and drop that: /// Instead, we move `foo.data` to a local and drop that:
/// ```ignore (ilustrative) /// ```ignore (illustrative)
/// storage.live(drop_temp) /// storage.live(drop_temp)
/// drop_temp = foo.data; /// drop_temp = foo.data;
/// drop(drop_temp) -> next /// drop(drop_temp) -> next

View file

@ -59,7 +59,7 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
let basic_blocks = body.basic_blocks.as_mut(); let basic_blocks = body.basic_blocks.as_mut();
let local_decls = &body.local_decls; let local_decls = &body.local_decls;
let needs_retag = |place: &Place<'tcx>| { let needs_retag = |place: &Place<'tcx>| {
!place.has_deref() // we're not eally interested in stores to "outside" locations, they are hard to keep track of anyway !place.has_deref() // we're not really interested in stores to "outside" locations, they are hard to keep track of anyway
&& may_contain_reference(place.ty(&*local_decls, tcx).ty, /*depth*/ 3, tcx) && may_contain_reference(place.ty(&*local_decls, tcx).ty, /*depth*/ 3, tcx)
&& !local_decls[place.local].is_deref_temp() && !local_decls[place.local].is_deref_temp()
}; };

View file

@ -22,7 +22,7 @@ impl<'tcx> MirPass<'tcx> for ConstDebugInfo {
fn run_pass(&self, _tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { fn run_pass(&self, _tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
trace!("running ConstDebugInfo on {:?}", body.source); trace!("running ConstDebugInfo on {:?}", body.source);
for (local, constant) in find_optimization_oportunities(body) { for (local, constant) in find_optimization_opportunities(body) {
for debuginfo in &mut body.var_debug_info { for debuginfo in &mut body.var_debug_info {
if let VarDebugInfoContents::Place(p) = debuginfo.value { if let VarDebugInfoContents::Place(p) = debuginfo.value {
if p.local == local && p.projection.is_empty() { if p.local == local && p.projection.is_empty() {
@ -45,7 +45,7 @@ struct LocalUseVisitor {
local_assignment_locations: IndexVec<Local, Option<Location>>, local_assignment_locations: IndexVec<Local, Option<Location>>,
} }
fn find_optimization_oportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Constant<'tcx>)> { fn find_optimization_opportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Constant<'tcx>)> {
let mut visitor = LocalUseVisitor { let mut visitor = LocalUseVisitor {
local_mutating_uses: IndexVec::from_elem(0, &body.local_decls), local_mutating_uses: IndexVec::from_elem(0, &body.local_decls),
local_assignment_locations: IndexVec::from_elem(None, &body.local_decls), local_assignment_locations: IndexVec::from_elem(None, &body.local_decls),

View file

@ -826,7 +826,7 @@ impl Visitor<'_> for CanConstProp {
| NonMutatingUse(NonMutatingUseContext::AddressOf) | NonMutatingUse(NonMutatingUseContext::AddressOf)
| MutatingUse(MutatingUseContext::Borrow) | MutatingUse(MutatingUseContext::Borrow)
| MutatingUse(MutatingUseContext::AddressOf) => { | MutatingUse(MutatingUseContext::AddressOf) => {
trace!("local {:?} can't be propagaged because it's used: {:?}", local, context); trace!("local {:?} can't be propagated because it's used: {:?}", local, context);
self.can_const_prop[local] = ConstPropMode::NoPropagation; self.can_const_prop[local] = ConstPropMode::NoPropagation;
} }
} }
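A hypothetical illustration of the condition traced above (again, not part of this diff): once a local is borrowed or has its address taken, later reads can no longer be replaced with a known constant.

fn main() {
    let mut x = 1;
    let p = &mut x;  // borrow / address-of: `x` can no longer be const-propagated
    *p = 2;          // a write the analysis cannot see past
    println!("{x}"); // must print 2, not a propagated 1
}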

View file

@ -111,7 +111,7 @@ impl CoverageGraph {
if predecessors.len() > 1 { if predecessors.len() > 1 {
"predecessors.len() > 1".to_owned() "predecessors.len() > 1".to_owned()
} else { } else {
format!("bb {} is not in precessors: {:?}", bb.index(), predecessors) format!("bb {} is not in predecessors: {:?}", bb.index(), predecessors)
} }
); );
} }

View file

@ -351,7 +351,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
} }
(FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom), (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
(_, _) => { (_, _) => {
// Could attempt some algebraic simplifcations here. // Could attempt some algebraic simplifications here.
(FlatSet::Top, FlatSet::Top) (FlatSet::Top, FlatSet::Top)
} }
} }

View file

@ -69,7 +69,7 @@
//! of this is that such liveness analysis can report more accurate results about whole locals at //! of this is that such liveness analysis can report more accurate results about whole locals at
//! a time. For example, consider: //! a time. For example, consider:
//! //!
//! ```ignore (syntax-highliting-only) //! ```ignore (syntax-highlighting-only)
//! _1 = u; //! _1 = u;
//! // unrelated code //! // unrelated code
//! _1.f1 = v; //! _1.f1 = v;
@ -360,7 +360,7 @@ struct FilterInformation<'a, 'body, 'alloc, 'tcx> {
} }
// We first implement some utility functions which we will expose removing candidates according to // We first implement some utility functions which we will expose removing candidates according to
// different needs. Throughout the livenss filtering, the `candidates` are only ever accessed // different needs. Throughout the liveness filtering, the `candidates` are only ever accessed
// through these methods, and not directly. // through these methods, and not directly.
impl<'alloc> Candidates<'alloc> { impl<'alloc> Candidates<'alloc> {
/// Just `Vec::retain`, but the condition is inverted and we add debugging output /// Just `Vec::retain`, but the condition is inverted and we add debugging output

View file

@ -24,7 +24,7 @@ use std::fmt;
/// In general, the compiler cannot determine at compile time whether a destructor will run or not. /// In general, the compiler cannot determine at compile time whether a destructor will run or not.
/// ///
/// At a high level, this pass refines Drop to only run the destructor if the /// At a high level, this pass refines Drop to only run the destructor if the
/// target is initialized. The way this is achievied is by inserting drop flags for every variable /// target is initialized. The way this is achieved is by inserting drop flags for every variable
/// that may be dropped, and then using those flags to determine whether a destructor should run. /// that may be dropped, and then using those flags to determine whether a destructor should run.
/// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or /// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
/// "drop shim" for the type of the dropped place. /// "drop shim" for the type of the dropped place.

View file

@ -1869,7 +1869,7 @@ fn check_must_not_suspend_ty<'tcx>(
}, },
) )
} }
// If drop tracking is enabled, we want to look through references, since the referrent // If drop tracking is enabled, we want to look through references, since the referent
// may not be considered live across the await point. // may not be considered live across the await point.
ty::Ref(_region, ty, _mutability) => { ty::Ref(_region, ty, _mutability) => {
let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix); let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);

View file

@ -21,7 +21,7 @@ pub struct SsaLocals {
/// We often encounter MIR bodies with 1 or 2 basic blocks. In those cases, it's unnecessary to /// We often encounter MIR bodies with 1 or 2 basic blocks. In those cases, it's unnecessary to
/// actually compute dominators, we can just compare block indices because bb0 is always the first /// actually compute dominators, we can just compare block indices because bb0 is always the first
/// block, and in any body all other blocks are always always dominated by bb0. /// block, and in any body all other blocks are always dominated by bb0.
struct SmallDominators { struct SmallDominators {
inner: Option<Dominators<BasicBlock>>, inner: Option<Dominators<BasicBlock>>,
} }

View file

@ -402,7 +402,7 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionMode) -> Vec<MonoItem<
} }
/// Collect all monomorphized items reachable from `starting_point`, and emit a note diagnostic if a /// Collect all monomorphized items reachable from `starting_point`, and emit a note diagnostic if a
/// post-monorphization error is encountered during a collection step. /// post-monomorphization error is encountered during a collection step.
#[instrument(skip(tcx, visited, recursion_depths, recursion_limit, inlining_map), level = "debug")] #[instrument(skip(tcx, visited, recursion_depths, recursion_limit, inlining_map), level = "debug")]
fn collect_items_rec<'tcx>( fn collect_items_rec<'tcx>(
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,

View file

@ -424,7 +424,7 @@ fn mono_item_visibility<'tcx>(
InstanceDef::Item(def) => def.did, InstanceDef::Item(def) => def.did,
InstanceDef::DropGlue(def_id, Some(_)) => def_id, InstanceDef::DropGlue(def_id, Some(_)) => def_id,
// We match the visiblity of statics here // We match the visibility of statics here
InstanceDef::ThreadLocalShim(def_id) => { InstanceDef::ThreadLocalShim(def_id) => {
return static_visibility(tcx, can_be_internalized, def_id); return static_visibility(tcx, can_be_internalized, def_id);
} }

View file

@ -67,7 +67,7 @@ pub(crate) fn parse_token_trees<'a>(
match token_trees { match token_trees {
Ok(stream) if unmatched_delims.is_empty() => Ok(stream), Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
_ => { _ => {
// Return error if there are unmatched delimiters or unclosng delimiters. // Return error if there are unmatched delimiters or unclosed delimiters.
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
// because the delimiter mismatch is more likely to be the root cause of error // because the delimiter mismatch is more likely to be the root cause of error

View file

@ -68,7 +68,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
} }
} else { } else {
// The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can // The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can
// happen with e.g. `#[foo = include_str!("non-existent-file.rs")]`; in that // happen with e.g. `#[foo = include_str!("nonexistent-file.rs")]`; in that
// case we delay the error because an earlier error will have already been // case we delay the error because an earlier error will have already been
// reported. // reported.
let msg = format!("unexpected expression: `{}`", pprust::expr_to_string(expr)); let msg = format!("unexpected expression: `{}`", pprust::expr_to_string(expr));

View file

@ -744,7 +744,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Symbol {
let pos = d.read_usize(); let pos = d.read_usize();
let old_pos = d.opaque.position(); let old_pos = d.opaque.position();
// move to str ofset and read // move to str offset and read
d.opaque.set_position(pos); d.opaque.set_position(pos);
let s = d.read_str(); let s = d.read_str();
let sym = Symbol::intern(s); let sym = Symbol::intern(s);

View file

@ -1,4 +1,4 @@
query_system_reentrant = internal compiler error: re-entrant incremental verify failure, suppressing message query_system_reentrant = internal compiler error: reentrant incremental verify failure, suppressing message
query_system_increment_compilation = internal compiler error: encountered incremental compilation error with {$dep_node} query_system_increment_compilation = internal compiler error: encountered incremental compilation error with {$dep_node}
.help = This is a known issue with the compiler. Run {$run_cmd} to allow your project to compile .help = This is a known issue with the compiler. Run {$run_cmd} to allow your project to compile

View file

@ -249,7 +249,7 @@ impl<K: DepKind> DepGraph<K> {
/// get an ICE. Normally, we would have tried (and failed) to mark /// get an ICE. Normally, we would have tried (and failed) to mark
/// some other query green (e.g. `item_children`) which was used /// some other query green (e.g. `item_children`) which was used
/// to obtain `C`, which would prevent us from ever trying to force /// to obtain `C`, which would prevent us from ever trying to force
/// a non-existent `D`. /// a nonexistent `D`.
/// ///
/// It might be possible to enforce that all `DepNode`s read during /// It might be possible to enforce that all `DepNode`s read during
/// deserialization already exist in the previous `DepGraph`. In /// deserialization already exist in the previous `DepGraph`. In

View file

@ -63,7 +63,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy {
fn handle_cycle_error(self) -> HandleCycleError; fn handle_cycle_error(self) -> HandleCycleError;
fn hash_result(self) -> HashResult<Self::Value>; fn hash_result(self) -> HashResult<Self::Value>;
// Just here for convernience and checking that the key matches the kind, don't override this. // Just here for convenience and checking that the key matches the kind, don't override this.
fn construct_dep_node(self, tcx: Qcx::DepContext, key: &Self::Key) -> DepNode<Qcx::DepKind> { fn construct_dep_node(self, tcx: Qcx::DepContext, key: &Self::Key) -> DepNode<Qcx::DepKind> {
DepNode::construct(tcx, self.dep_kind(), key) DepNode::construct(tcx, self.dep_kind(), key)
} }

View file

@ -691,7 +691,7 @@ fn incremental_verify_ich_failed<Tcx>(
// which may result in another fingerprint mismatch while we're in the middle // which may result in another fingerprint mismatch while we're in the middle
// of processing this one. To avoid a double-panic (which kills the process // of processing this one. To avoid a double-panic (which kills the process
// before we can print out the query static), we print out a terse // before we can print out the query static), we print out a terse
// but 'safe' message if we detect a re-entrant call to this method. // but 'safe' message if we detect a reentrant call to this method.
thread_local! { thread_local! {
static INSIDE_VERIFY_PANIC: Cell<bool> = const { Cell::new(false) }; static INSIDE_VERIFY_PANIC: Cell<bool> = const { Cell::new(false) };
}; };

View file

@ -175,7 +175,7 @@ impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
/// to not update anything and we can skip it. /// to not update anything and we can skip it.
/// ///
/// We are checking this condition only if the correct value of private visibility is /// We are checking this condition only if the correct value of private visibility is
/// cheaply available, otherwise it does't make sense performance-wise. /// cheaply available, otherwise it doesn't make sense performance-wise.
/// ///
/// `None` is returned if the update can be skipped, /// `None` is returned if the update can be skipped,
/// and cheap private visibility is returned otherwise. /// and cheap private visibility is returned otherwise.

View file

@ -22,7 +22,7 @@ pub(crate) struct UnderscoreLifetimeNameCannotBeUsedHere(#[primary_span] pub(cra
#[derive(Diagnostic)] #[derive(Diagnostic)]
#[diag(resolve_crate_may_not_be_imported)] #[diag(resolve_crate_may_not_be_imported)]
pub(crate) struct CrateMayNotBeImprted(#[primary_span] pub(crate) Span); pub(crate) struct CrateMayNotBeImported(#[primary_span] pub(crate) Span);
#[derive(Diagnostic)] #[derive(Diagnostic)]
#[diag(resolve_crate_root_imports_must_be_named_explicitly)] #[diag(resolve_crate_root_imports_must_be_named_explicitly)]

View file

@ -1079,7 +1079,7 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
for rib in self.lifetime_ribs.iter().rev() { for rib in self.lifetime_ribs.iter().rev() {
match rib.kind { match rib.kind {
// We are inside a `PolyTraitRef`. The lifetimes are // We are inside a `PolyTraitRef`. The lifetimes are
// to be intoduced in that (maybe implicit) `for<>` binder. // to be introduced in that (maybe implicit) `for<>` binder.
LifetimeRibKind::Generics { LifetimeRibKind::Generics {
binder, binder,
kind: LifetimeBinderKind::PolyTrait, kind: LifetimeBinderKind::PolyTrait,
@ -3803,7 +3803,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
// use std::u8; // bring module u8 in scope // use std::u8; // bring module u8 in scope
// fn f() -> u8 { // OK, resolves to primitive u8, not to std::u8 // fn f() -> u8 { // OK, resolves to primitive u8, not to std::u8
// u8::max_value() // OK, resolves to associated function <u8>::max_value, // u8::max_value() // OK, resolves to associated function <u8>::max_value,
// // not to non-existent std::u8::max_value // // not to nonexistent std::u8::max_value
// } // }
// //
// Such behavior is required for backward compatibility. // Such behavior is required for backward compatibility.

View file

@ -91,7 +91,7 @@ fn import_candidate_to_enum_paths(suggestion: &ImportSuggestion) -> (String, Str
/// Description of an elided lifetime. /// Description of an elided lifetime.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub(super) struct MissingLifetime { pub(super) struct MissingLifetime {
/// Used to overwrite the resolution with the suggestion, to avoid cascasing errors. /// Used to overwrite the resolution with the suggestion, to avoid cascading errors.
pub id: NodeId, pub id: NodeId,
/// Where to suggest adding the lifetime. /// Where to suggest adding the lifetime.
pub span: Span, pub span: Span,
@ -408,7 +408,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
} }
let Some(path_last_segment) = path.last() else { return }; let Some(path_last_segment) = path.last() else { return };
let item_str = path_last_segment.ident; let item_str = path_last_segment.ident;
// Emit help message for fake-self from other languages (e.g., `this` in Javascript). // Emit help message for fake-self from other languages (e.g., `this` in JavaScript).
if ["this", "my"].contains(&item_str.as_str()) { if ["this", "my"].contains(&item_str.as_str()) {
err.span_suggestion_short( err.span_suggestion_short(
span, span,

View file

@ -483,7 +483,7 @@ impl SourceMap {
self.span_to_string(sp, FileNameDisplayPreference::Remapped) self.span_to_string(sp, FileNameDisplayPreference::Remapped)
} }
/// Format the span location suitable for pretty printing anotations with relative line numbers /// Format the span location suitable for pretty printing annotations with relative line numbers
pub fn span_to_relative_line_string(&self, sp: Span, relative_to: Span) -> String { pub fn span_to_relative_line_string(&self, sp: Span, relative_to: Span) -> String {
if self.files.borrow().source_files.is_empty() || sp.is_dummy() || relative_to.is_dummy() { if self.files.borrow().source_files.is_empty() || sp.is_dummy() || relative_to.is_dummy() {
return "no-location".to_string(); return "no-location".to_string();
@ -777,7 +777,7 @@ impl SourceMap {
/// Given a 'Span', tries to tell if it's wrapped by "<>" or "()" /// Given a 'Span', tries to tell if it's wrapped by "<>" or "()"
/// the algorithm searches if the next character is '>' or ')' after skipping white space /// the algorithm searches if the next character is '>' or ')' after skipping white space
/// then searches the previous charactoer to match '<' or '(' after skipping white space /// then searches the previous character to match '<' or '(' after skipping white space
/// return true if wrapped by '<>' or '()' /// return true if wrapped by '<>' or '()'
pub fn span_wrapped_by_angle_or_parentheses(&self, span: Span) -> bool { pub fn span_wrapped_by_angle_or_parentheses(&self, span: Span) -> bool {
self.span_to_source(span, |src, start_index, end_index| { self.span_to_source(span, |src, start_index, end_index| {
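A standalone sketch of the check described in the comment above; the function name and signature here are invented for illustration, since the real method works through `span_to_source` on the `SourceMap`:

fn wrapped_by_angle_or_parens(src: &str, start: usize, end: usize) -> bool {
    // assumes `start..end` lies on char boundaries within `src`
    let next = src[end..].chars().find(|c| !c.is_whitespace());
    let prev = src[..start].chars().rev().find(|c| !c.is_whitespace());
    matches!((prev, next), (Some('<'), Some('>')) | (Some('('), Some(')')))
}

fn main() {
    assert!(wrapped_by_angle_or_parens("Vec< u8 >", 5, 7)); // "u8" is wrapped by "<>"
    assert!(!wrapped_by_angle_or_parens("Vec<u8>", 0, 3));  // "Vec" is not
}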

View file

@ -10,7 +10,7 @@
//! > self-consistent and sensible LLVM IR generation, but does not //! > self-consistent and sensible LLVM IR generation, but does not
//! > conform to any particular ABI. //! > conform to any particular ABI.
//! > //! >
//! > - Doxygen Doxumentation of `clang::DefaultABIInfo` //! > - Doxygen Documentation of `clang::DefaultABIInfo`
//! //!
//! This calling convention may not match AVR-GCC in all cases. //! This calling convention may not match AVR-GCC in all cases.
//! //!

View file

@ -2285,13 +2285,13 @@ impl Target {
} }
} }
} ); } );
($key_name:ident, falliable_list) => ( { ($key_name:ident, fallible_list) => ( {
let name = (stringify!($key_name)).replace("_", "-"); let name = (stringify!($key_name)).replace("_", "-");
obj.remove(&name).and_then(|j| { obj.remove(&name).and_then(|j| {
if let Some(v) = j.as_array() { if let Some(v) = j.as_array() {
match v.iter().map(|a| FromStr::from_str(a.as_str().unwrap())).collect() { match v.iter().map(|a| FromStr::from_str(a.as_str().unwrap())).collect() {
Ok(l) => { base.$key_name = l }, Ok(l) => { base.$key_name = l },
// FIXME: `falliable_list` can't re-use the `key!` macro for list // FIXME: `fallible_list` can't re-use the `key!` macro for list
// elements and the error messages from that macro, so it has a bad // elements and the error messages from that macro, so it has a bad
// generic message instead // generic message instead
Err(_) => return Some(Err( Err(_) => return Some(Err(
@ -2610,7 +2610,7 @@ impl Target {
key!(has_thumb_interworking, bool); key!(has_thumb_interworking, bool);
key!(debuginfo_kind, DebuginfoKind)?; key!(debuginfo_kind, DebuginfoKind)?;
key!(split_debuginfo, SplitDebuginfo)?; key!(split_debuginfo, SplitDebuginfo)?;
key!(supported_split_debuginfo, falliable_list)?; key!(supported_split_debuginfo, fallible_list)?;
key!(supported_sanitizers, SanitizerSet)?; key!(supported_sanitizers, SanitizerSet)?;
key!(default_adjusted_cabi, Option<Abi>)?; key!(default_adjusted_cabi, Option<Abi>)?;
key!(generate_arange_section, bool); key!(generate_arange_section, bool);

View file

@ -12,7 +12,7 @@
// //
// We have opted for these instead of one target per processor (e.g., `cortex-m0`, `cortex-m3`, // We have opted for these instead of one target per processor (e.g., `cortex-m0`, `cortex-m3`,
// etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost // etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost
// non-existent from the POV of codegen so it doesn't make sense to have separate targets for them. // nonexistent from the POV of codegen so it doesn't make sense to have separate targets for them.
// And if differences exist between two processors under the same target, rustc flags can be used to // And if differences exist between two processors under the same target, rustc flags can be used to
// optimize for one processor or the other. // optimize for one processor or the other.
// //

View file

@ -51,7 +51,7 @@ pub(super) enum CandidateSource {
BuiltinImpl, BuiltinImpl,
/// An assumption from the environment. /// An assumption from the environment.
/// ///
/// More precicely we've used the `n-th` assumption in the `param_env`. /// More precisely we've used the `n-th` assumption in the `param_env`.
/// ///
/// ## Examples /// ## Examples
/// ///
@ -241,7 +241,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
// HACK: `_: Trait` is ambiguous, because it may be satisfied via a builtin rule, // HACK: `_: Trait` is ambiguous, because it may be satisfied via a builtin rule,
// object bound, alias bound, etc. We are unable to determine this until we can at // object bound, alias bound, etc. We are unable to determine this until we can at
// least structually resolve the type one layer. // least structurally resolve the type one layer.
if goal.predicate.self_ty().is_ty_var() { if goal.predicate.self_ty().is_ty_var() {
return vec![Candidate { return vec![Candidate {
source: CandidateSource::BuiltinImpl, source: CandidateSource::BuiltinImpl,

View file

@ -156,8 +156,8 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
} }
GenericArgKind::Const(c) => { GenericArgKind::Const(c) => {
if let ty::ConstKind::Bound(debrujin, b) = c.kind() { if let ty::ConstKind::Bound(debruijn, b) = c.kind() {
assert_eq!(debrujin, ty::INNERMOST); assert_eq!(debruijn, ty::INNERMOST);
opt_values[b] = Some(*original_value); opt_values[b] = Some(*original_value);
} }
} }
@ -177,7 +177,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
// As an optimization we sometimes avoid creating a new inference variable here. // As an optimization we sometimes avoid creating a new inference variable here.
// //
// All new inference variables we create start out in the current universe of the caller. // All new inference variables we create start out in the current universe of the caller.
// This is conceptionally wrong as these inference variables would be able to name // This is conceptually wrong as these inference variables would be able to name
// more placeholders then they should be able to. However the inference variables have // more placeholders then they should be able to. However the inference variables have
// to "come from somewhere", so by equating them with the original values of the caller // to "come from somewhere", so by equating them with the original values of the caller
// later on, we pull them down into their correct universe again. // later on, we pull them down into their correct universe again.

View file

@ -591,7 +591,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
Some(self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS)) Some(self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS))
} }
// These types cannot be structurally decomposed into constitutent // These types cannot be structurally decomposed into constituent
// types, and therefore have no built-in auto impl. // types, and therefore have no built-in auto impl.
ty::Dynamic(..) ty::Dynamic(..)
| ty::Param(..) | ty::Param(..)

View file

@ -467,7 +467,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
} }
} }
self.tcx.sess.delay_span_bug(DUMMY_SP, "expected fullfillment errors") self.tcx.sess.delay_span_bug(DUMMY_SP, "expected fulfillment errors")
} }
/// Reports that an overflow has occurred and halts compilation. We /// Reports that an overflow has occurred and halts compilation. We
@ -2056,7 +2056,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
if candidates.iter().any(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })) { if candidates.iter().any(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })) {
// If any of the candidates is a perfect match, we don't want to show all of them. // If any of the candidates is a perfect match, we don't want to show all of them.
// This is particularly relevant for the case of numeric types (as they all have the // This is particularly relevant for the case of numeric types (as they all have the
// same cathegory). // same category).
candidates.retain(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })); candidates.retain(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. }));
} }
candidates candidates

View file

@ -1381,7 +1381,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
} }
} }
// Issue #104961, we need to add parentheses properly for compond expressions // Issue #104961, we need to add parentheses properly for compound expressions
// for example, `x.starts_with("hi".to_string() + "you")` // for example, `x.starts_with("hi".to_string() + "you")`
// should be `x.starts_with(&("hi".to_string() + "you"))` // should be `x.starts_with(&("hi".to_string() + "you"))`
let Some(body_id) = self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) else { return false; }; let Some(body_id) = self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) else { return false; };

View file

@ -14,7 +14,7 @@ use rustc_span::DUMMY_SP;
use super::outlives_bounds::InferCtxtExt; use super::outlives_bounds::InferCtxtExt;
pub enum CopyImplementationError<'tcx> { pub enum CopyImplementationError<'tcx> {
InfrigingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>, InfringingFieldsReason<'tcx>)>), InfringingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>, InfringingFieldsReason<'tcx>)>),
NotAnAdt, NotAnAdt,
HasDestructor, HasDestructor,
} }
@ -125,7 +125,7 @@ pub fn type_allowed_to_implement_copy<'tcx>(
} }
if !infringing.is_empty() { if !infringing.is_empty() {
return Err(CopyImplementationError::InfrigingFields(infringing)); return Err(CopyImplementationError::InfringingFields(infringing));
} }
if adt.has_dtor(tcx) { if adt.has_dtor(tcx) {

View file

@ -203,7 +203,7 @@ fn do_normalize_predicates<'tcx>(
} }
}; };
debug!("do_normalize_predictes: normalized predicates = {:?}", predicates); debug!("do_normalize_predicates: normalized predicates = {:?}", predicates);
// We can use the `elaborated_env` here; the region code only // We can use the `elaborated_env` here; the region code only
// cares about declarations like `'a: 'b`. // cares about declarations like `'a: 'b`.

View file

@ -888,7 +888,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let c1 = tcx.expand_abstract_consts(c1); let c1 = tcx.expand_abstract_consts(c1);
let c2 = tcx.expand_abstract_consts(c2); let c2 = tcx.expand_abstract_consts(c2);
debug!( debug!(
"evalaute_predicate_recursively: equating consts:\nc1= {:?}\nc2= {:?}", "evaluate_predicate_recursively: equating consts:\nc1= {:?}\nc2= {:?}",
c1, c2 c1, c2
); );

View file

@ -285,7 +285,7 @@ pub(super) fn sanity_check_layout<'tcx>(
{ {
// These are never actually accessed anyway, so we can skip the coherence check // These are never actually accessed anyway, so we can skip the coherence check
// for them. They also fail that check, since they have // for them. They also fail that check, since they have
// `Aggregate`/`Uninhbaited` ABI even when the main type is // `Aggregate`/`Uninhabited` ABI even when the main type is
// `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size // `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
// 0, and sometimes, variants without fields have non-0 size.) // 0, and sometimes, variants without fields have non-0 size.)
continue; continue;

View file

@ -243,7 +243,7 @@ fn drop_tys_helper<'tcx>(
} else { } else {
let field_tys = adt_def.all_fields().map(|field| { let field_tys = adt_def.all_fields().map(|field| {
let r = tcx.type_of(field.did).subst(tcx, substs); let r = tcx.type_of(field.did).subst(tcx, substs);
debug!("drop_tys_helper: Subst into {:?} with {:?} gettng {:?}", field, substs, r); debug!("drop_tys_helper: Subst into {:?} with {:?} getting {:?}", field, substs, r);
r r
}); });
if only_significant { if only_significant {

View file

@ -4,7 +4,7 @@ error[E0026]: struct `Thing` does not have a field named `z`
LL | Thing { x, y, z } => {} LL | Thing { x, y, z } => {}
| ^ struct `Thing` does not have this field | ^ struct `Thing` does not have this field
| |
= note: This error indicates that a struct pattern attempted to extract a non-existent field from a struct. Struct fields are identified by the name used before the colon : so struct patterns should resemble the declaration of the struct type being matched. = note: This error indicates that a struct pattern attempted to extract a nonexistent field from a struct. Struct fields are identified by the name used before the colon : so struct patterns should resemble the declaration of the struct type being matched.
If you are using shorthand field patterns but want to refer to the struct field by a different name, you should rename it explicitly. If you are using shorthand field patterns but want to refer to the struct field by a different name, you should rename it explicitly.
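For reference, a minimal hypothetical program that reproduces the quoted diagnostic:

struct Thing { x: u32, y: u32 }

fn main() {
    let thing = Thing { x: 0, y: 0 };
    match thing {
        Thing { x, y, z } => {} // error[E0026]: struct `Thing` does not have a field named `z`
    }
}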

Some files were not shown because too many files have changed in this diff.