Auto merge of #77798 - JohnTitor:rollup-82u711m, r=JohnTitor
Rollup of 10 pull requests

Successful merges:

 - #77195 (Link to documentation-specific guidelines.)
 - #77629 (Cleanup of `eat_while()` in lexer)
 - #77709 (Link Vec leak doc to Box)
 - #77738 (fix __rust_alloc_error_handler comment)
 - #77748 (Dead code cleanup in windows-gnu std)
 - #77754 (Add TraitDef::find_map_relevant_impl)
 - #77766 (Clarify the debug-related values should take boolean)
 - #77777 (doc: disambiguate stat in MetadataExt::as_raw_stat)
 - #77782 (Fix typo in error code description)
 - #77787 (Update `changelog-seen` in config.toml.example)

Failed merges:

r? `@ghost`
commit 790d19cd25
15 changed files with 76 additions and 125 deletions
@@ -21,7 +21,7 @@ impl Foo {
 The `self` keyword can only be used inside methods, which are associated
 functions (functions defined inside of a `trait` or `impl` block) that have a
 `self` receiver as its first parameter, like `self`, `&self`, `&mut self` or
-`self: &mut Pin<Self>` (this last one is an example of an ["abitrary `self`
+`self: &mut Pin<Self>` (this last one is an example of an ["arbitrary `self`
 type"](https://github.com/rust-lang/rust/issues/44874)).
 
 Check if the associated function's parameter list should have contained a `self`
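As a side note on the error text above (not part of the diff): a minimal sketch of the rule it describes, using an illustrative `Foo` type of my own rather than the example from the error page.

```rust
struct Foo {
    value: i32,
}

impl Foo {
    // A method: it declares a `&self` receiver, so `self` is usable in the body.
    fn get(&self) -> i32 {
        self.value
    }

    // An associated function without a `self` receiver: mentioning `self`
    // in this body would produce E0424.
    fn default_value() -> i32 {
        0
    }
}

fn main() {
    let foo = Foo { value: 7 };
    println!("{} {}", foo.get(), Foo::default_value());
}
```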
@@ -48,6 +48,7 @@ impl Token {
 }
 
 /// Enum representing common lexeme types.
+// perf note: Changing all `usize` to `u32` doesn't change performance. See #77629
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum TokenKind {
     // Multi-char tokens:
@@ -160,6 +161,7 @@ pub enum LiteralKind {
 /// - `r##~"abcde"##`: `InvalidStarter`
 /// - `r###"abcde"##`: `NoTerminator { expected: 3, found: 2, possible_terminator_offset: Some(11)`
 /// - Too many `#`s (>65535): `TooManyDelimiters`
+// perf note: It doesn't matter that this makes `Token` 36 bytes bigger. See #77629
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum RawStrError {
     /// Non `#` characters exist between `r` and `"` eg. `r#~"..`
@@ -689,7 +691,12 @@ impl Cursor<'_> {
         let mut max_hashes = 0;
 
         // Count opening '#' symbols.
-        let n_start_hashes = self.eat_while(|c| c == '#');
+        let mut eaten = 0;
+        while self.first() == '#' {
+            eaten += 1;
+            self.bump();
+        }
+        let n_start_hashes = eaten;
 
         // Check that string is started.
         match self.bump() {
@@ -724,16 +731,11 @@ impl Cursor<'_> {
         // Note that this will not consume extra trailing `#` characters:
         // `r###"abcde"####` is lexed as a `RawStr { n_hashes: 3 }`
         // followed by a `#` token.
-        let mut hashes_left = n_start_hashes;
-        let is_closing_hash = |c| {
-            if c == '#' && hashes_left != 0 {
-                hashes_left -= 1;
-                true
-            } else {
-                false
-            }
-        };
-        let n_end_hashes = self.eat_while(is_closing_hash);
+        let mut n_end_hashes = 0;
+        while self.first() == '#' && n_end_hashes < n_start_hashes {
+            n_end_hashes += 1;
+            self.bump();
+        }
 
         if n_end_hashes == n_start_hashes {
             return (n_start_hashes, None);
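A standalone sketch of the counting rule the two hunks above implement, written against plain `std` iterators rather than the lexer's `Cursor` (the helper name and the tuple return are mine): count the opening `#`s, then accept at most that many closing `#`s, so an input like `r###"abcde"####` leaves the extra `#` for the next token.

```rust
/// Returns (opening, closing) hash counts for a raw-string-like input.
fn count_raw_str_hashes(input: &str) -> (usize, usize) {
    let mut chars = input.chars().peekable();
    assert_eq!(chars.next(), Some('r'));

    // Count opening '#' symbols.
    let mut n_start_hashes = 0;
    while chars.peek() == Some(&'#') {
        n_start_hashes += 1;
        chars.next();
    }

    // Skip the opening quote and the string body up to the next quote.
    assert_eq!(chars.next(), Some('"'));
    while matches!(chars.next(), Some(c) if c != '"') {}

    // Count closing '#' symbols, but never more than were opened.
    let mut n_end_hashes = 0;
    while chars.peek() == Some(&'#') && n_end_hashes < n_start_hashes {
        n_end_hashes += 1;
        chars.next();
    }

    (n_start_hashes, n_end_hashes)
}

fn main() {
    // Fully terminated: three opening and three closing hashes.
    assert_eq!(count_raw_str_hashes(r####"r###"abcde"###"####), (3, 3));
    // Under-terminated, as in the `NoTerminator` doc example above.
    assert_eq!(count_raw_str_hashes(r####"r###"abcde"##"####), (3, 2));
}
```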
@@ -807,17 +809,9 @@ impl Cursor<'_> {
     }
 
     /// Eats symbols while predicate returns true or until the end of file is reached.
-    /// Returns amount of eaten symbols.
-    fn eat_while<F>(&mut self, mut predicate: F) -> usize
-    where
-        F: FnMut(char) -> bool,
-    {
-        let mut eaten: usize = 0;
+    fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
         while predicate(self.first()) && !self.is_eof() {
-            eaten += 1;
             self.bump();
         }
-
-        eaten
     }
 }
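The new signature takes the predicate as `impl FnMut(char) -> bool` and drops the unused count. A reduced sketch of the same shape on a toy scanner (the `Scanner` type and its helpers are illustrative, not rustc's `Cursor`):

```rust
struct Scanner<'a> {
    rest: &'a str,
}

impl<'a> Scanner<'a> {
    fn is_eof(&self) -> bool {
        self.rest.is_empty()
    }

    // Peek at the next character without consuming it; EOF is mapped to '\0'.
    fn first(&self) -> char {
        self.rest.chars().next().unwrap_or('\0')
    }

    fn bump(&mut self) {
        let mut chars = self.rest.chars();
        chars.next();
        self.rest = chars.as_str();
    }

    /// Same shape as the simplified helper in the diff: the predicate comes in
    /// as `impl FnMut(char) -> bool`, nothing is returned, and callers that
    /// need a count keep it themselves.
    fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
        while predicate(self.first()) && !self.is_eof() {
            self.bump();
        }
    }
}

fn main() {
    let mut scanner = Scanner { rest: "   hello" };
    scanner.eat_while(|c| c.is_whitespace());
    assert_eq!(scanner.rest, "hello");
}
```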
@@ -123,10 +123,26 @@ impl<'tcx> TyCtxt<'tcx> {
         self_ty: Ty<'tcx>,
         mut f: F,
     ) {
+        let _: Option<()> = self.find_map_relevant_impl(def_id, self_ty, |did| {
+            f(did);
+            None
+        });
+    }
+
+    /// Applies function to every impl that could possibly match the self type `self_ty` and returns
+    /// the first non-none value.
+    pub fn find_map_relevant_impl<T, F: FnMut(DefId) -> Option<T>>(
+        self,
+        def_id: DefId,
+        self_ty: Ty<'tcx>,
+        mut f: F,
+    ) -> Option<T> {
         let impls = self.trait_impls_of(def_id);
 
         for &impl_def_id in impls.blanket_impls.iter() {
-            f(impl_def_id);
+            if let result @ Some(_) = f(impl_def_id) {
+                return result;
+            }
         }
 
         // simplify_type(.., false) basically replaces type parameters and
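A reduced sketch of the pattern this hunk introduces, on a toy registry instead of `TyCtxt` (all names below are stand-ins): the early-exiting `find_map_*` walk becomes the primitive, and the old `for_each_*` walk is a thin wrapper whose callback always returns `None`.

```rust
use std::collections::HashMap;

// Stand-ins for compiler types, just to show the shape of the API.
type DefId = u32;

struct ImplRegistry {
    blanket_impls: Vec<DefId>,
    non_blanket_impls: HashMap<&'static str, Vec<DefId>>,
}

impl ImplRegistry {
    /// Applies `f` to every candidate impl and returns the first `Some` value,
    /// stopping the walk as soon as one is found.
    fn find_map_relevant_impl<T>(&self, mut f: impl FnMut(DefId) -> Option<T>) -> Option<T> {
        for &impl_def_id in &self.blanket_impls {
            if let result @ Some(_) = f(impl_def_id) {
                return result;
            }
        }
        for &impl_def_id in self.non_blanket_impls.values().flatten() {
            if let result @ Some(_) = f(impl_def_id) {
                return result;
            }
        }
        None
    }

    /// The old-style visitor, now just a wrapper that never short-circuits.
    fn for_each_relevant_impl(&self, mut f: impl FnMut(DefId)) {
        let _: Option<()> = self.find_map_relevant_impl(|did| {
            f(did);
            None
        });
    }
}

fn main() {
    let registry = ImplRegistry {
        blanket_impls: vec![1],
        non_blanket_impls: HashMap::from([("Vec", vec![10, 11])]),
    };

    // Early exit: returns the first impl id greater than 5.
    let found = registry.find_map_relevant_impl(|id| if id > 5 { Some(id) } else { None });
    assert_eq!(found, Some(10));

    // Exhaustive walk via the wrapper.
    let mut seen = Vec::new();
    registry.for_each_relevant_impl(|id| seen.push(id));
    assert_eq!(seen, vec![1, 10, 11]);
}
```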
@@ -157,14 +173,20 @@ impl<'tcx> TyCtxt<'tcx> {
         if let Some(simp) = fast_reject::simplify_type(self, self_ty, true) {
             if let Some(impls) = impls.non_blanket_impls.get(&simp) {
                 for &impl_def_id in impls {
-                    f(impl_def_id);
+                    if let result @ Some(_) = f(impl_def_id) {
+                        return result;
+                    }
                 }
             }
         } else {
             for &impl_def_id in impls.non_blanket_impls.values().flatten() {
-                f(impl_def_id);
+                if let result @ Some(_) = f(impl_def_id) {
+                    return result;
+                }
             }
         }
+
+        None
     }
 
     /// Returns an iterator containing all impls
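The `result @ Some(_)` form used in these hunks binds the whole `Option` while matching only the `Some` case, so the closure's return value can be passed through unchanged; a tiny illustration in plain code (names are mine):

```rust
fn first_match(values: &[i32], mut classify: impl FnMut(i32) -> Option<i32>) -> Option<i32> {
    for &value in values {
        // `result @ Some(_)` binds the whole `Option` returned by the closure,
        // so it can be returned directly instead of re-wrapping the inner value.
        if let result @ Some(_) = classify(value) {
            return result;
        }
    }
    None
}

fn main() {
    let found = first_match(&[1, 3, 4, 7], |v| if v % 2 == 0 { Some(v) } else { None });
    assert_eq!(found, Some(4));
}
```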
@@ -346,14 +346,14 @@ impl<'tcx> TyCtxt<'tcx> {
         let drop_trait = self.lang_items().drop_trait()?;
         self.ensure().coherent_trait(drop_trait);
 
-        let mut dtor_did = None;
         let ty = self.type_of(adt_did);
-        self.for_each_relevant_impl(drop_trait, ty, |impl_did| {
+        let dtor_did = self.find_map_relevant_impl(drop_trait, ty, |impl_did| {
             if let Some(item) = self.associated_items(impl_did).in_definition_order().next() {
                 if validate(self, impl_did).is_ok() {
-                    dtor_did = Some(item.def_id);
+                    return Some(item.def_id);
                 }
             }
+            None
         });
 
         Some(ty::Destructor { did: dtor_did? })
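The `calculate_dtor` change has the same shape as moving from a `for_each` that writes into a captured `Option` to a `find_map` that returns the hit; a hedged analogy using standard iterators (the ids and the validation step are made up):

```rust
fn main() {
    // Candidate impl ids and a stand-in for the validation step.
    let impl_ids = [3_u32, 7, 8, 13];
    let validate = |id: u32| id % 2 == 0;

    // Before-style: walk everything and record the hit in a captured Option.
    let mut dtor_did = None;
    impl_ids.iter().for_each(|&id| {
        if validate(id) {
            dtor_did = Some(id);
        }
    });

    // After-style: find_map stops at the first hit and hands the value back.
    let dtor_did_via_find_map = impl_ids
        .iter()
        .find_map(|&id| if validate(id) { Some(id) } else { None });

    assert_eq!(dtor_did, dtor_did_via_find_map);
    assert_eq!(dtor_did_via_find_map, Some(8));
}
```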
@@ -34,7 +34,6 @@ impl<'a, 'tcx> ConstMutationChecker<'a, 'tcx> {
 
     fn is_const_item_without_destructor(&self, local: Local) -> Option<DefId> {
         let def_id = self.is_const_item(local)?;
-        let mut any_dtor = |_tcx, _def_id| Ok(());
 
         // We avoid linting mutation of a const item if the const's type has a
         // Drop impl. The Drop logic observes the mutation which was performed.
@@ -54,7 +53,7 @@ impl<'a, 'tcx> ConstMutationChecker<'a, 'tcx> {
         //
         //     #[const_mutation_allowed]
         //     pub const LOG: Log = Log { msg: "" };
-        match self.tcx.calculate_dtor(def_id, &mut any_dtor) {
+        match self.tcx.calculate_dtor(def_id, &mut |_, _| Ok(())) {
            Some(_) => None,
            None => Some(def_id),
        }
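With the named `any_dtor` binding gone, the closure is passed inline at its only call site. A small sketch with a hypothetical `calculate_dtor`-like signature (not the real compiler API) showing that the two spellings are equivalent:

```rust
// A hypothetical `calculate_dtor`-like signature: it takes a mutable reference
// to a validation callback and reports the first id that passes it.
fn calculate_dtor_like(
    ids: &[u32],
    validate: &mut dyn FnMut(u32) -> Result<(), ()>,
) -> Option<u32> {
    ids.iter().copied().find(|&id| validate(id).is_ok())
}

fn main() {
    let ids = [1, 2, 3];

    // Old spelling: bind the closure first, then pass a mutable reference.
    let mut any_dtor = |_id: u32| -> Result<(), ()> { Ok(()) };
    let via_binding = calculate_dtor_like(&ids, &mut any_dtor);

    // New spelling: pass the closure inline; behaviour is identical.
    let via_inline = calculate_dtor_like(&ids, &mut |_| Ok(()));

    assert_eq!(via_binding, via_inline);
}
```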
@@ -1384,17 +1384,11 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> {
         trait_ref: &ty::PolyTraitRef<'tcx>,
     ) {
         let get_trait_impl = |trait_def_id| {
-            let mut trait_impl = None;
-            self.tcx.for_each_relevant_impl(
+            self.tcx.find_map_relevant_impl(
                 trait_def_id,
                 trait_ref.skip_binder().self_ty(),
-                |impl_def_id| {
-                    if trait_impl.is_none() {
-                        trait_impl = Some(impl_def_id);
-                    }
-                },
-            );
-            trait_impl
+                |impl_def_id| Some(impl_def_id),
+            )
         };
         let required_trait_path = self.tcx.def_path_str(trait_ref.def_id());
         let all_traits = self.tcx.all_traits(LOCAL_CRATE);
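Passing `|impl_def_id| Some(impl_def_id)` makes the early-exiting walk return the first relevant impl outright, replacing the captured `trait_impl` variable; a reduced illustration on a plain slice (the function name is mine):

```rust
// Returning `Some` for every candidate turns the early-exiting walk into
// "give me the first relevant impl, if any".
fn first_relevant_impl(candidate_impls: &[u32]) -> Option<u32> {
    candidate_impls.iter().copied().find_map(|impl_def_id| Some(impl_def_id))
}

fn main() {
    assert_eq!(first_relevant_impl(&[42, 7]), Some(42));
    assert_eq!(first_relevant_impl(&[]), None);
}
```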