Auto merge of #84334 - klensy:typo-compiler, r=jyn514
Fix a few typos in comments
commit e888a57da8
32 changed files with 39 additions and 39 deletions
@@ -1346,7 +1346,7 @@ pub enum ExprKind {
 Field(P<Expr>, Ident),
 /// An indexing operation (e.g., `foo[2]`).
 Index(P<Expr>, P<Expr>),
-/// A range (e.g., `1..2`, `1..`, `..2`, `1..=2`, `..=2`; and `..` in destructuring assingment).
+/// A range (e.g., `1..2`, `1..`, `..2`, `1..=2`, `..=2`; and `..` in destructuring assignment).
 Range(Option<P<Expr>>, Option<P<Expr>>, RangeLimits),
 /// An underscore, used in destructuring assignment to ignore a value.
 Underscore,
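For reference, the `Range` and `Underscore` expression kinds touched by this hunk correspond to ordinary surface syntax. Below is a minimal sketch (not part of the diff; variable names are illustrative) of range expressions and of `..` / `_` in destructuring assignment:

```rust
fn main() {
    // Range expressions: half-open and inclusive forms.
    let half_open = 1..5;    // Range(Some, Some, HalfOpen)
    let from = 1..;          // Range(Some, None, HalfOpen)
    let to_inclusive = ..=5; // Range(None, Some, Closed)

    // `..` and `_` in destructuring assignment.
    let (mut a, mut b) = (0, 0);
    (a, .., b) = (1, 2, 3, 4); // `..` skips the middle elements
    (_, b) = (10, 20);         // `_` (the `Underscore` expression) discards a value
    assert_eq!((a, b), (1, 20));

    let _ = (half_open, from, to_inclusive);
}
```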
@@ -301,7 +301,7 @@ impl AttrAnnotatedTokenStream {
 /// tokens.
 ///
 /// For example, `#[cfg(FALSE)] struct Foo {}` would
-/// have an `attrs` field contaiing the `#[cfg(FALSE)]` attr,
+/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
 /// and a `tokens` field storing the (unparesd) tokens `struct Foo {}`
 #[derive(Clone, Debug, Encodable, Decodable)]
 pub struct AttributesData {
@@ -321,7 +321,7 @@ struct HandlerInner {

 /// This set contains the `DiagnosticId` of all emitted diagnostics to avoid
 /// emitting the same diagnostic with extended help (`--teach`) twice, which
-/// would be uneccessary repetition.
+/// would be unnecessary repetition.
 taught_diagnostics: FxHashSet<DiagnosticId>,

 /// Used to suggest rustc --explain <error code>
@@ -69,7 +69,7 @@ enum KleeneOp {
 ZeroOrMore,
 /// Kleene plus (`+`) for one or more repetitions
 OneOrMore,
-/// Kleene optional (`?`) for zero or one reptitions
+/// Kleene optional (`?`) for zero or one repetitions
 ZeroOrOne,
 }

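The three Kleene operators named here are the ordinary `macro_rules!` repetition operators. A small illustrative example (macro and variable names are assumptions, not taken from the diff):

```rust
// `*` = ZeroOrMore, `+` = OneOrMore, `?` = ZeroOrOne.
macro_rules! kleene_demo {
    (star $($x:expr),*) => { vec![$($x),*] }; // zero or more repetitions
    (plus $($x:expr),+) => { vec![$($x),+] }; // at least one repetition
    (opt $($x:expr)?)   => { { let v: Vec<i32> = vec![$($x)?]; v } }; // zero or one
}

fn main() {
    assert_eq!(kleene_demo!(star), Vec::<i32>::new());
    assert_eq!(kleene_demo!(plus 1, 2, 3), vec![1, 2, 3]);
    assert_eq!(kleene_demo!(opt 7), vec![7]);
}
```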
@@ -209,7 +209,7 @@ pub(super) fn transcribe<'a>(
 }
 } else {
 // 0 is the initial counter (we have done 0 repretitions so far). `len`
-// is the total number of reptitions we should generate.
+// is the total number of repetitions we should generate.
 repeats.push((0, len));

 // The first time we encounter the sequence we push it to the stack. It
@@ -362,7 +362,7 @@ impl LockstepIterSize {
 /// appropriate meta-vars in `interpolations`.
 ///
 /// Note that if `repeats` does not match the exact correct depth of a meta-var,
-/// `lookup_cur_matched` will return `None`, which is why this still works even in the presnece of
+/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
 /// multiple nested matcher sequences.
 fn lockstep_iter_size(
 tree: &mbe::TokenTree,
@@ -191,7 +191,7 @@ impl<'infcx, 'tcx> InferCtxt<'infcx, 'tcx> {
 ///
 /// This also tests if the given const `ct` contains an inference variable which was previously
 /// unioned with `target_vid`. If this is the case, inferring `target_vid` to `ct`
-/// would result in an infinite type as we continously replace an inference variable
+/// would result in an infinite type as we continuously replace an inference variable
 /// in `ct` with `ct` itself.
 ///
 /// This is especially important as unevaluated consts use their parents generics.
@@ -279,7 +279,7 @@ where
 /// Relate a type inference variable with a value type. This works
 /// by creating a "generalization" G of the value where all the
 /// lifetimes are replaced with fresh inference values. This
-/// genearlization G becomes the value of the inference variable,
+/// generalization G becomes the value of the inference variable,
 /// and is then related in turn to the value. So e.g. if you had
 /// `vid = ?0` and `value = &'a u32`, we might first instantiate
 /// `?0` to a type like `&'0 u32` where `'0` is a fresh variable,
@@ -3022,7 +3022,7 @@ impl<'tcx> LateLintPass<'tcx> for DerefNullPtr {
 false
 }

-/// test if experssion is the literal `0`
+/// test if expression is the literal `0`
 fn is_zero(expr: &hir::Expr<'_>) -> bool {
 match &expr.kind {
 rustc_hir::ExprKind::Lit(ref lit) => {
@@ -2483,7 +2483,7 @@ declare_lint! {
 ///
 /// On x86, `asm!` uses the intel assembly syntax by default. While this
 /// can be switched using assembler directives like `.att_syntax`, using the
-/// `att_syntax` option is recomended instead because it will also properly
+/// `att_syntax` option is recommended instead because it will also properly
 /// prefix register placeholders with `%` as required by AT&T syntax.
 pub BAD_ASM_STYLE,
 Warn,
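As a rough sketch of the style this lint text recommends (x86-64 only; a minimal example, assuming the documented `options(att_syntax)` flag of `asm!`):

```rust
use std::arch::asm;

fn main() {
    let x: u64;
    unsafe {
        // With `options(att_syntax)` the whole template is assembled in AT&T
        // syntax and the register placeholder `{0}` is expanded with the
        // required `%` prefix, so no `.att_syntax` directive is needed.
        asm!("mov $42, {0}", out(reg) x, options(att_syntax));
    }
    assert_eq!(x, 42);
}
```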
@@ -2678,7 +2678,7 @@ declare_lint! {
 /// Statics with an uninhabited type can never be initialized, so they are impossible to define.
 /// However, this can be side-stepped with an `extern static`, leading to problems later in the
 /// compiler which assumes that there are no initialized uninhabited places (such as locals or
-/// statics). This was accientally allowed, but is being phased out.
+/// statics). This was accidentally allowed, but is being phased out.
 pub UNINHABITED_STATIC,
 Warn,
 "uninhabited static",
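A minimal sketch of the pattern the `UNINHABITED_STATIC` lint warns about (hypothetical names; the `extern` block only declares the static, so nothing needs to exist at link time unless it is used):

```rust
// `Void` has no values, so a static of this type can never be validly initialized.
enum Void {}

extern "C" {
    // warns: static of uninhabited type (`uninhabited_static` lint)
    static NEVER: Void;
}

fn main() {}
```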
@@ -45,7 +45,7 @@ static_assert_size!(InterpErrorInfo<'_>, 8);

 /// Packages the kind of error we got from the const code interpreter
 /// up with a Rust-level backtrace of where the error occurred.
-/// Thsese should always be constructed by calling `.into()` on
+/// These should always be constructed by calling `.into()` on
 /// a `InterpError`. In `rustc_mir::interpret`, we have `throw_err_*`
 /// macros for this.
 #[derive(Debug)]
@@ -1521,7 +1521,7 @@ pub enum StatementKind<'tcx> {
 /// Marks the start of a "coverage region", injected with '-Zinstrument-coverage'. A
 /// `Coverage` statement carries metadata about the coverage region, used to inject a coverage
 /// map into the binary. If `Coverage::kind` is a `Counter`, the statement also generates
-/// executable code, to increment a counter varible at runtime, each time the code region is
+/// executable code, to increment a counter variable at runtime, each time the code region is
 /// executed.
 Coverage(Box<Coverage>),

@@ -1280,7 +1280,7 @@ rustc_queries! {
 desc { "testing if a region is late bound" }
 }
 /// For a given item (like a struct), gets the default lifetimes to be used
-/// for each paramter if a trait object were to be passed for that parameter.
+/// for each parameter if a trait object were to be passed for that parameter.
 /// For example, for `struct Foo<'a, T, U>`, this would be `['static, 'static]`.
 /// For `struct Foo<'a, T: 'a, U>`, this would instead be `['a, 'static]`.
 query object_lifetime_defaults_map(_: LocalDefId)
@@ -160,7 +160,7 @@ impl CapturedPlace<'tcx> {
 }
 }

-/// Returns the `LocalDefId` of the closure that captureed this Place
+/// Returns the `LocalDefId` of the closure that captured this Place
 pub fn get_closure_local_def_id(&self) -> LocalDefId {
 match self.place.base {
 HirPlaceBase::Upvar(upvar_id) => upvar_id.closure_expr_id,
@@ -1,4 +1,4 @@
-//! Contains utilities for generating suggestions for borrowck errors related to unsatisified
+//! Contains utilities for generating suggestions for borrowck errors related to unsatisfied
 //! outlives constraints.

 use std::collections::BTreeMap;
@@ -53,7 +53,7 @@ impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
 /// Extra machine state for CTFE, and the Machine instance
 pub struct CompileTimeInterpreter<'mir, 'tcx> {
 /// For now, the number of terminators that can be evaluated before we throw a resource
-/// exhuastion error.
+/// exhaustion error.
 ///
 /// Setting this to `0` disables the limit and allows the interpreter to run forever.
 pub steps_remaining: usize,
@@ -510,7 +510,7 @@ impl EffectIndex {
 }
 }

-/// Returns `true` if the effect at `self` should be applied eariler than the effect at `other`
+/// Returns `true` if the effect at `self` should be applied earlier than the effect at `other`
 /// in forward order.
 fn precedes_in_forward_order(self, other: Self) -> bool {
 let ord = self
@@ -225,7 +225,7 @@ impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag> {
 }

 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
-/// Normalice `place.ptr` to a `Pointer` if this is a place and not a ZST.
+/// Normalize `place.ptr` to a `Pointer` if this is a place and not a ZST.
 /// Can be helpful to avoid lots of `force_ptr` calls later, if this place is used a lot.
 #[inline]
 pub fn force_op_ptr(
@@ -29,11 +29,11 @@ pub fn in_any_value_of_ty(
 /// Normally, we would determine what qualifications apply to each type and error when an illegal
 /// operation is performed on such a type. However, this was found to be too imprecise, especially
 /// in the presence of `enum`s. If only a single variant of an enum has a certain qualification, we
-/// needn't reject code unless it actually constructs and operates on the qualifed variant.
+/// needn't reject code unless it actually constructs and operates on the qualified variant.
 ///
 /// To accomplish this, const-checking and promotion use a value-based analysis (as opposed to a
 /// type-based one). Qualifications propagate structurally across variables: If a local (or a
-/// projection of a local) is assigned a qualifed value, that local itself becomes qualifed.
+/// projection of a local) is assigned a qualified value, that local itself becomes qualified.
 pub trait Qualif {
 /// The name of the file used to debug the dataflow analysis that computes this qualif.
 const ANALYSIS_NAME: &'static str;
@@ -816,7 +816,7 @@ fn bcb_to_string_sections(
 sections
 }

-/// Returns a simple string representation of a `TerminatorKind` variant, indenpendent of any
+/// Returns a simple string representation of a `TerminatorKind` variant, independent of any
 /// values it might hold.
 pub(super) fn term_type(kind: &TerminatorKind<'tcx>) -> &'static str {
 match kind {
@@ -17,7 +17,7 @@
 //! Also note, some basic features of `Span` also rely on the `Span`s own "session globals", which
 //! are unrelated to the `TyCtxt` global. Without initializing the `Span` session globals, some
 //! basic, coverage-specific features would be impossible to test, but thankfully initializing these
-//! globals is comparitively simpler. The easiest way is to wrap the test in a closure argument
+//! globals is comparatively simpler. The easiest way is to wrap the test in a closure argument
 //! to: `rustc_span::with_default_session_globals(|| { test_here(); })`.

 use super::counters;
@@ -3,7 +3,7 @@ use rustc_middle::ty::subst::SubstsRef;
 use rustc_middle::ty::{self, TyCtxt};
 use rustc_span::def_id::DefId;

-/// Checks if the specified `local` is used as the `self` prameter of a method call
+/// Checks if the specified `local` is used as the `self` parameter of a method call
 /// in the provided `BasicBlock`. If it is, then the `DefId` of the called method is
 /// returned.
 pub fn find_self_call<'tcx>(
@@ -80,7 +80,7 @@ crate struct PlaceBuilder<'tcx> {
 /// The projections are truncated to represent a path that might be captured by a
 /// closure/generator. This implies the vector returned from this function doesn't contain
 /// ProjectionElems `Downcast`, `ConstantIndex`, `Index`, or `Subslice` because those will never be
-/// part of a path that is captued by a closure. We stop applying projections once we see the first
+/// part of a path that is captured by a closure. We stop applying projections once we see the first
 /// projection that isn't captured by a closure.
 fn convert_to_hir_projections_and_truncate_for_capture<'tcx>(
 mir_projections: &[PlaceElem<'tcx>],
@@ -578,7 +578,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

 /// Lower a captured upvar. Note we might not know the actual capture index,
 /// so we create a place starting from `PlaceBase::Upvar`, which will be resolved
-/// once all projections that allow us to indentify a capture have been applied.
+/// once all projections that allow us to identify a capture have been applied.
 fn lower_captured_upvar(
 &mut self,
 block: BasicBlock,
@@ -1448,7 +1448,7 @@ impl<'a> Parser<'a> {
 Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: false })))
 }

-/// Is this unambiguously the start of a `macro_rules! foo` item defnition?
+/// Is this unambiguously the start of a `macro_rules! foo` item definition?
 fn is_macro_rules_item(&mut self) -> bool {
 self.check_keyword(kw::MacroRules)
 && self.look_ahead(1, |t| *t == token::Not)
@@ -1114,7 +1114,7 @@ impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> {

 ////////////////////////////////////////////////////////////////////////////////////////////
 /// Type privacy visitor, checks types for privacy and reports violations.
-/// Both explicitly written types and inferred types of expressions and patters are checked.
+/// Both explicitly written types and inferred types of expressions and patterns are checked.
 /// Checks are performed on "semantic" types regardless of names and their hygiene.
 ////////////////////////////////////////////////////////////////////////////////////////////

@@ -245,7 +245,7 @@ enum Scope<'a> {
 opaque_type_parent: bool,

 /// True only if this `Binder` scope is from the quantifiers on a
-/// `PolyTraitRef`. This is necessary for `assocated_type_bounds`, which
+/// `PolyTraitRef`. This is necessary for `associated_type_bounds`, which
 /// requires binders of nested trait refs to be merged.
 from_poly_trait_ref: bool,

@@ -303,7 +303,7 @@ enum Scope<'a> {
 /// of. Other than that, if ask for bound vars for each, we expect
 /// `['a, 'b]`. If we *didn't* allow binders before `T`, then we would
 /// always introduce a binder scope at the inner trait ref. This is great,
-/// becauase later on during type-checking, we will ask "what are the late
+/// because later on during type-checking, we will ask "what are the late
 /// bound vars on this trait ref". However, because we allow bound vars on
 /// the bound itself, we have to have some way of keeping track of the fact
 /// that we actually want to store the late bound vars as being associated
@@ -748,7 +748,7 @@ pub struct ExpnData {

 /// Used to force two `ExpnData`s to have different `Fingerprint`s.
 /// Due to macro expansion, it's possible to end up with two `ExpnId`s
-/// that have identical `ExpnData`s. This violates the constract of `HashStable`
+/// that have identical `ExpnData`s. This violates the contract of `HashStable`
 /// - the two `ExpnId`s are not equal, but their `Fingerprint`s are equal
 /// (since the numerical `ExpnId` value is not considered by the `HashStable`
 /// implementation).
@@ -73,8 +73,8 @@ mod attr_impl {
 }

 /// Sometimes an ABI requires small integers to be extended to a full or partial register. This enum
-/// defines if this extension should be zero-extension or sign-extension when necssary. When it is
-/// not necesary to extend the argument, this enum is ignored.
+/// defines if this extension should be zero-extension or sign-extension when necessary. When it is
+/// not necessary to extend the argument, this enum is ignored.
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub enum ArgExtension {
 None,
@@ -1954,7 +1954,7 @@ pub enum ArgKind {
 Arg(String, String),

 /// An argument of tuple type. For a "found" argument, the span is
-/// the locationo in the source of the pattern. For a "expected"
+/// the location in the source of the pattern. For a "expected"
 /// argument, it will be None. The vector is a list of (name, ty)
 /// strings for the components of the tuple.
 Tuple(Option<Span>, Vec<(String, String)>),
@@ -944,7 +944,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 /// subobligations without taking in a 'parent' depth, causing the
 /// generated subobligations to have a `recursion_depth` of `0`.
 ///
-/// To ensure that obligation_depth never decreasees, we force all subobligations
+/// To ensure that obligation_depth never decreases, we force all subobligations
 /// to have at least the depth of the original obligation.
 fn add_depth<T: 'cx, I: Iterator<Item = &'cx mut Obligation<'tcx, T>>>(
 &self,
@@ -158,7 +158,7 @@ enum ProbeResult {

 /// When adjusting a receiver we often want to do one of
 ///
-/// - Add a `&` (or `&mut`), converting the recevier from `T` to `&T` (or `&mut T`)
+/// - Add a `&` (or `&mut`), converting the receiver from `T` to `&T` (or `&mut T`)
 /// - If the receiver has type `*mut T`, convert it to `*const T`
 ///
 /// This type tells us which one to do.
@@ -1588,7 +1588,7 @@ fn migration_suggestion_for_2229(
 /// If both the CaptureKind and Expression are considered to be equivalent,
 /// then `CaptureInfo` A is preferred. This can be useful in cases where we want to priortize
 /// expressions reported back to the user as part of diagnostics based on which appears earlier
-/// in the closure. This can be acheived simply by calling
+/// in the closure. This can be achieved simply by calling
 /// `determine_capture_info(existing_info, current_info)`. This works out because the
 /// expressions that occur earlier in the closure body than the current expression are processed before.
 /// Consider the following example
@@ -671,7 +671,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
 /// In the following example the closures `c` only captures `p.x`` even though `incr`
 /// is a capture of the nested closure
 ///
-/// ```rust,ignore(cannot-test-this-because-pseduo-code)
+/// ```rust,ignore(cannot-test-this-because-pseudo-code)
 /// let p = ..;
 /// let c = || {
 /// let incr = 10;
@@ -715,7 +715,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
 // The only places we want to fake read before creating the parent closure are the ones that
 // are not local to it/ defined by it.
 //
-// ```rust,ignore(cannot-test-this-because-pseduo-code)
+// ```rust,ignore(cannot-test-this-because-pseudo-code)
 // let v1 = (0, 1);
 // let c = || { // fake reads: v1
 // let v2 = (0, 1);