From 4a1372251850c6cf62dfe7d380bc515763426107 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Mon, 12 Feb 2024 15:26:59 +1100 Subject: [PATCH 01/92] Tweak delayed bug mentions. Now that we have both `delayed_bug` and `span_delayed_bug`, it makes sense to use the generic term "delayed bug" more. --- crates/hir-def/src/attr/builtin.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-def/src/attr/builtin.rs index b20ee9e5bf6..55b9a1dfdcb 100644 --- a/crates/hir-def/src/attr/builtin.rs +++ b/crates/hir-def/src/attr/builtin.rs @@ -650,7 +650,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing), rustc_attr!( TEST, rustc_error, Normal, - template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly + template!(Word, List: "delayed_bug_from_inside_query"), WarnFollowingWordOnly ), rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing), From a52acccc588b77520a003a217b91f47413371cac Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 23 Aug 2022 21:45:15 +0900 Subject: [PATCH 02/92] Implement `RustIrDatabase::impl_provided_for()` for `ChalkContext` --- crates/hir-ty/src/chalk_db.rs | 197 ++++++++++++++++++++++------------ 1 file changed, 130 insertions(+), 67 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index bd243518fc6..157f7ce462d 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -1,7 +1,7 @@ //! The implementation of `RustIrDatabase` for Chalk, which provides information //! about the code that Chalk needs. use core::ops; -use std::{iter, sync::Arc}; +use std::{iter, ops::ControlFlow, sync::Arc}; use tracing::debug; @@ -136,81 +136,91 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]), }; - let trait_module = trait_.module(self.db.upcast()); - let type_module = match self_ty_fp { - Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())), - Some(TyFingerprint::ForeignType(type_id)) => { - Some(from_foreign_def_id(type_id).module(self.db.upcast())) - } - Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())), - _ => None, - }; - - let mut def_blocks = - [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]; - - // Note: Since we're using impls_for_trait, only impls where the trait - // can be resolved should ever reach Chalk. impl_datum relies on that - // and will panic if the trait can't be resolved. 
-        let in_deps = self.db.trait_impls_in_deps(self.krate);
-        let in_self = self.db.trait_impls_in_crate(self.krate);
-
-        let block_impls = iter::successors(self.block, |&block_id| {
-            cov_mark::hit!(block_local_impls);
-            self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block())
-        })
-        .inspect(|&block_id| {
-            // make sure we don't search the same block twice
-            def_blocks.iter_mut().for_each(|block| {
-                if *block == Some(block_id) {
-                    *block = None;
-                }
-            });
-        })
-        .filter_map(|block_id| self.db.trait_impls_in_block(block_id));
         let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
+
         let mut result = vec![];
-        match fps {
-            [] => {
-                debug!("Unrestricted search for {:?} impls...", trait_);
-                let mut f = |impls: &TraitImpls| {
-                    result.extend(impls.for_trait(trait_).map(id_to_chalk));
-                };
-                f(&in_self);
-                in_deps.iter().map(ops::Deref::deref).for_each(&mut f);
-                block_impls.for_each(|it| f(&it));
-                def_blocks
-                    .into_iter()
-                    .flatten()
-                    .filter_map(|it| self.db.trait_impls_in_block(it))
-                    .for_each(|it| f(&it));
-            }
-            fps => {
-                let mut f =
-                    |impls: &TraitImpls| {
-                        result.extend(fps.iter().flat_map(|fp| {
-                            impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
-                        }));
-                    };
-                f(&in_self);
-                in_deps.iter().map(ops::Deref::deref).for_each(&mut f);
-                block_impls.for_each(|it| f(&it));
-                def_blocks
-                    .into_iter()
-                    .flatten()
-                    .filter_map(|it| self.db.trait_impls_in_block(it))
-                    .for_each(|it| f(&it));
-            }
-        }
+        if fps.is_empty() {
+            debug!("Unrestricted search for {:?} impls...", trait_);
+            self.for_trait_impls(trait_, self_ty_fp, |impls| {
+                result.extend(impls.for_trait(trait_).map(id_to_chalk));
+                ControlFlow::Continue(())
+            })
+        } else {
+            self.for_trait_impls(trait_, self_ty_fp, |impls| {
+                result.extend(
+                    fps.iter().flat_map(move |fp| {
+                        impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
+                    }),
+                );
+                ControlFlow::Continue(())
+            })
+        };
         debug!("impls_for_trait returned {} impls", result.len());
         result
     }
     fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind<Interner>) -> bool {
         debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind);
-        false // FIXME
+
+        let trait_id = from_chalk_trait_id(auto_trait_id);
+        let self_ty = kind.clone().intern(Interner);
+        // We cannot filter impls by `TyFingerprint` for the following types:
+        let self_ty_fp = match kind {
+            // because we need to find any impl whose Self type is a ref with the same mutability
+            // (we don't care about the inner type).
+            TyKind::Ref(..) => None,
+            // because we need to find any impl whose Self type is a tuple with the same arity.
+            TyKind::Tuple(..) => None,
+            _ => TyFingerprint::for_trait_impl(&self_ty),
+        };
+
+        let check_kind = |impl_id| {
+            let impl_self_ty = self.db.impl_self_ty(impl_id);
+            // NOTE(skip_binders): it's safe to skip binders here as we don't check substitutions.
+            let impl_self_kind = impl_self_ty.skip_binders().kind(Interner);
+
+            match (kind, impl_self_kind) {
+                (TyKind::Adt(id_a, _), TyKind::Adt(id_b, _)) => id_a == id_b,
+                (TyKind::AssociatedType(id_a, _), TyKind::AssociatedType(id_b, _)) => id_a == id_b,
+                (TyKind::Scalar(scalar_a), TyKind::Scalar(scalar_b)) => scalar_a == scalar_b,
+                (TyKind::Str, TyKind::Str) => true,
+                (TyKind::Tuple(arity_a, _), TyKind::Tuple(arity_b, _)) => arity_a == arity_b,
+                (TyKind::OpaqueType(id_a, _), TyKind::OpaqueType(id_b, _)) => id_a == id_b,
+                (TyKind::Slice(_), TyKind::Slice(_)) => true,
+                (TyKind::FnDef(id_a, _), TyKind::FnDef(id_b, _)) => id_a == id_b,
+                (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _)) => id_a == id_b,
+                (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b,
+                (TyKind::Never, TyKind::Never) => true,
+                (TyKind::Array(_, _), TyKind::Array(_, _)) => true,
+                (TyKind::Closure(id_a, _), TyKind::Closure(id_b, _)) => id_a == id_b,
+                (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _)) => id_a == id_b,
+                (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => {
+                    id_a == id_b
+                }
+                (TyKind::Foreign(id_a), TyKind::Foreign(id_b)) => id_a == id_b,
+                (TyKind::Error, TyKind::Error) => true,
+                (_, _) => false,
+            }
+        };
+
+        if let Some(fp) = self_ty_fp {
+            self.for_trait_impls(trait_id, self_ty_fp, |impls| {
+                match impls.for_trait_and_self_ty(trait_id, fp).any(check_kind) {
+                    true => ControlFlow::Break(()),
+                    false => ControlFlow::Continue(()),
+                }
+            })
+        } else {
+            self.for_trait_impls(trait_id, self_ty_fp, |impls| {
+                match impls.for_trait(trait_id).any(check_kind) {
+                    true => ControlFlow::Break(()),
+                    false => ControlFlow::Continue(()),
+                }
+            })
+        }
+        .is_break()
     }
+
     fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
         self.db.associated_ty_value(self.krate, id)
     }
@@ -489,6 +499,59 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
     }
 }
 
+impl<'a> ChalkContext<'a> {
+    fn for_trait_impls(
+        &self,
+        trait_id: hir_def::TraitId,
+        self_ty_fp: Option<TyFingerprint>,
+        mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>,
+    ) -> ControlFlow<()> {
+        // Note: Since we're using `impls_for_trait` and `impl_provided_for`,
+        // only impls where the trait can be resolved should ever reach Chalk.
+        // `impl_datum` relies on that and will panic if the trait can't be resolved.
+ let in_deps = self.db.trait_impls_in_deps(self.krate); + let in_self = self.db.trait_impls_in_crate(self.krate); + let trait_module = trait_id.module(self.db.upcast()); + let type_module = match self_ty_fp { + Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())), + Some(TyFingerprint::ForeignType(type_id)) => { + Some(from_foreign_def_id(type_id).module(self.db.upcast())) + } + Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())), + _ => None, + }; + + let mut def_blocks = + [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]; + + let block_impls = iter::successors(self.block, |&block_id| { + cov_mark::hit!(block_local_impls); + self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block()) + }) + .inspect(|&block_id| { + // make sure we don't search the same block twice + def_blocks.iter_mut().for_each(|block| { + if *block == Some(block_id) { + *block = None; + } + }); + }) + .filter_map(|block_id| self.db.trait_impls_in_block(block_id)); + f(&in_self)?; + for it in in_deps.iter().map(ops::Deref::deref) { + f(it)?; + } + for it in block_impls { + f(&it)?; + } + for it in def_blocks.into_iter().flatten().filter_map(|it| self.db.trait_impls_in_block(it)) + { + f(&it)?; + } + ControlFlow::Continue(()) + } +} + impl chalk_ir::UnificationDatabase for &dyn HirDatabase { fn fn_def_variance( &self, From 4940017716b7d3eeba314a361a9e4afa64e85a95 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 23 Aug 2022 21:56:56 +0900 Subject: [PATCH 03/92] Rename `StructDatum` -> `AdtDatum` --- crates/hir-ty/src/chalk_db.rs | 20 ++++++++++---------- crates/hir-ty/src/db.rs | 6 +++--- crates/hir/src/lib.rs | 4 ++-- crates/ide-db/src/apply_change.rs | 2 +- crates/ide-db/src/lib.rs | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 157f7ce462d..33ae07e3638 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -33,7 +33,7 @@ use crate::{ pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum; -pub(crate) type StructDatum = chalk_solve::rust_ir::AdtDatum; +pub(crate) type AdtDatum = chalk_solve::rust_ir::AdtDatum; pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum; pub(crate) type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum; @@ -53,8 +53,8 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { fn trait_datum(&self, trait_id: TraitId) -> Arc { self.db.trait_datum(self.krate, trait_id) } - fn adt_datum(&self, struct_id: AdtId) -> Arc { - self.db.struct_datum(self.krate, struct_id) + fn adt_datum(&self, struct_id: AdtId) -> Arc { + self.db.adt_datum(self.krate, struct_id) } fn adt_repr(&self, _struct_id: AdtId) -> Arc> { // FIXME: keep track of these @@ -712,13 +712,13 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { } } -pub(crate) fn struct_datum_query( +pub(crate) fn adt_datum_query( db: &dyn HirDatabase, krate: CrateId, - struct_id: AdtId, -) -> Arc { - debug!("struct_datum {:?}", struct_id); - let chalk_ir::AdtId(adt_id) = struct_id; + adt_id: AdtId, +) -> Arc { + debug!("adt_datum {:?}", adt_id); + let chalk_ir::AdtId(adt_id) = adt_id; let generic_params = generics(db.upcast(), adt_id.into()); let upstream = adt_id.module(db.upcast()).krate() != krate; let where_clauses = { @@ -737,10 +737,10 @@ pub(crate) fn struct_datum_query( fields: Vec::new(), // FIXME add 
fields (only relevant for auto traits), }; let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses }; - let struct_datum = StructDatum { + let struct_datum = AdtDatum { // FIXME set ADT kind kind: rust_ir::AdtKind::Struct, - id: struct_id, + id: chalk_ir::AdtId(adt_id), binders: make_binders(db, &generic_params, struct_datum_bound), flags, }; diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index fbd366864a4..42313ff52b1 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -220,12 +220,12 @@ pub trait HirDatabase: DefDatabase + Upcast { trait_id: chalk_db::TraitId, ) -> sync::Arc; - #[salsa::invoke(chalk_db::struct_datum_query)] - fn struct_datum( + #[salsa::invoke(chalk_db::adt_datum_query)] + fn adt_datum( &self, krate: CrateId, struct_id: chalk_db::AdtId, - ) -> sync::Arc; + ) -> sync::Arc; #[salsa::invoke(chalk_db::impl_datum_query)] fn impl_datum( diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 08f7bb14caa..beaa6dd4d67 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -3798,9 +3798,9 @@ impl Type { // For non-phantom_data adts we check variants/fields as well as generic parameters TyKind::Adt(adt_id, substitution) - if !db.struct_datum(krate, *adt_id).flags.phantom_data => + if !db.adt_datum(krate, *adt_id).flags.phantom_data => { - let adt_datum = &db.struct_datum(krate, *adt_id); + let adt_datum = &db.adt_datum(krate, *adt_id); let adt_datum_bound = adt_datum.binders.clone().substitute(Interner, substitution); adt_datum_bound diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 296253aa1ee..1a214ef0bf5 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -124,7 +124,7 @@ impl RootDatabase { hir::db::InternCoroutineQuery hir::db::AssociatedTyDataQuery hir::db::TraitDatumQuery - hir::db::StructDatumQuery + hir::db::AdtDatumQuery hir::db::ImplDatumQuery hir::db::FnDefDatumQuery hir::db::FnDefVarianceQuery diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 2881748dd47..d31dad514aa 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -280,7 +280,7 @@ impl RootDatabase { // hir_db::InternCoroutineQuery hir_db::AssociatedTyDataQuery hir_db::TraitDatumQuery - hir_db::StructDatumQuery + hir_db::AdtDatumQuery hir_db::ImplDatumQuery hir_db::FnDefDatumQuery hir_db::FnDefVarianceQuery From 03340742ea42c00588707001aca8a05c9d846e08 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 24 Aug 2022 01:55:08 +0900 Subject: [PATCH 04/92] Return ADT fields and `phantom_data` flag from `adt_datum_query()` --- crates/hir-ty/src/chalk_db.rs | 59 ++++++++++++++++++++++++++--------- 1 file changed, 44 insertions(+), 15 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 33ae07e3638..0fde0f661d7 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -720,26 +720,55 @@ pub(crate) fn adt_datum_query( debug!("adt_datum {:?}", adt_id); let chalk_ir::AdtId(adt_id) = adt_id; let generic_params = generics(db.upcast(), adt_id.into()); - let upstream = adt_id.module(db.upcast()).krate() != krate; - let where_clauses = { - let generic_params = generics(db.upcast(), adt_id.into()); - let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - convert_where_clauses(db, adt_id.into(), &bound_vars) - }; + let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); + let where_clauses = 
convert_where_clauses(db, adt_id.into(), &bound_vars_subst); + + let phantom_data_id = db + .lang_item(krate, SmolStr::new_inline("phantom_data")) + .and_then(|item| item.as_struct()) + .map(|item| item.into()); let flags = rust_ir::AdtFlags { - upstream, - // FIXME set fundamental and phantom_data flags correctly + upstream: adt_id.module(db.upcast()).krate() != krate, + // FIXME set fundamental flags correctly fundamental: false, - phantom_data: false, + phantom_data: phantom_data_id == Some(adt_id), }; - // FIXME provide enum variants properly (for auto traits) - let variant = rust_ir::AdtVariantDatum { - fields: Vec::new(), // FIXME add fields (only relevant for auto traits), + + let variant_id_to_fields = |id| { + let field_types = db.field_types(id); + let fields = id + .variant_data(db.upcast()) + .fields() + .iter() + .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) + .collect(); + rust_ir::AdtVariantDatum { fields } }; - let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses }; + + let (kind, variants) = match adt_id { + hir_def::AdtId::StructId(id) => { + (rust_ir::AdtKind::Struct, vec![variant_id_to_fields(id.into())]) + } + hir_def::AdtId::EnumId(id) => { + let variants = db + .enum_data(id) + .variants + .iter() + .map(|(local_id, _)| { + let variant_id = hir_def::EnumVariantId { parent: id, local_id }; + variant_id_to_fields(variant_id.into()) + }) + .collect(); + (rust_ir::AdtKind::Enum, variants) + } + hir_def::AdtId::UnionId(id) => { + (rust_ir::AdtKind::Union, vec![variant_id_to_fields(id.into())]) + } + }; + + let struct_datum_bound = rust_ir::AdtDatumBound { variants, where_clauses }; let struct_datum = AdtDatum { - // FIXME set ADT kind - kind: rust_ir::AdtKind::Struct, + kind, id: chalk_ir::AdtId(adt_id), binders: make_binders(db, &generic_params, struct_datum_bound), flags, From 4829f591fbf6ce7e91394775c8be10cdf9291d1b Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 24 Aug 2022 03:27:59 +0900 Subject: [PATCH 05/92] Add test for auto trait bounds --- crates/hir-ty/src/chalk_db.rs | 7 ++-- crates/hir-ty/src/tests/traits.rs | 55 +++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 5 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 0fde0f661d7..5039d51d70c 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -724,7 +724,7 @@ pub(crate) fn adt_datum_query( let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst); let phantom_data_id = db - .lang_item(krate, SmolStr::new_inline("phantom_data")) + .lang_item(krate, LangItem::PhantomData) .and_then(|item| item.as_struct()) .map(|item| item.into()); let flags = rust_ir::AdtFlags { @@ -754,10 +754,7 @@ pub(crate) fn adt_datum_query( .enum_data(id) .variants .iter() - .map(|(local_id, _)| { - let variant_id = hir_def::EnumVariantId { parent: id, local_id }; - variant_id_to_fields(variant_id.into()) - }) + .map(|&(variant_id, _)| variant_id_to_fields(variant_id.into())) .collect(); (rust_ir::AdtKind::Enum, variants) } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index db14addaf18..68cd6071ec7 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -4553,3 +4553,58 @@ fn foo() { "#, ); } + +#[test] +fn auto_trait_bound() { + check_types( + r#" +//- minicore: sized +auto trait Send {} +impl !Send for *const T {} + +struct Yes; +trait IsSend { const IS_SEND: Yes; } +impl IsSend 
for T { const IS_SEND: Yes = Yes; } + +struct Struct(T); +enum Enum { A, B(T) } +union Union { t: T } + +#[lang = "phantom_data"] +struct PhantomData; + +fn f() { + T::IS_SEND; + //^^^^^^^^^^Yes + U::IS_SEND; + //^^^^^^^^^^{unknown} + <*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^{unknown} + Struct::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^Yes + Struct::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^{unknown} + Struct::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + Enum::::IS_SEND; + //^^^^^^^^^^^^^^^^^^Yes + Enum::::IS_SEND; + //^^^^^^^^^^^^^^^^^^{unknown} + Enum::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + Union::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^Yes + Union::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^{unknown} + Union::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + PhantomData::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^Yes + PhantomData::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + PhantomData::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} +} +"#, + ); +} From 0eca3ef93eacd34e47d2893a3777e1d55593823c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 14 Feb 2024 13:35:43 +0100 Subject: [PATCH 06/92] Fix coerce_unsize_generic test --- crates/hir-ty/src/tests/coercion.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index d56b15b9b74..bfb8df61a33 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -536,7 +536,7 @@ fn test() { #[test] fn coerce_unsize_generic() { - check( + check_no_mismatches( r#" //- minicore: coerce_unsized struct Foo { t: T }; @@ -544,9 +544,7 @@ struct Bar(Foo); fn test() { let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] }; - //^^^^^^^^^^^^^^^^^^^^^ expected &Foo<[usize]>, got &Foo<[i32; 3]> let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); - //^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &Bar<[usize]>, got &Bar<[i32; 3]> } "#, ); From 9d18e197bcbd732a349ff8b51ecf6532d7ea822a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 14 Feb 2024 14:01:23 +0100 Subject: [PATCH 07/92] Filter out `{unknown}` types in `adt_datum_quqery` --- crates/hir-def/src/data/adt.rs | 2 +- crates/hir-ty/src/chalk_db.rs | 69 ++++++++++++++++++++-------------- 2 files changed, 42 insertions(+), 29 deletions(-) diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs index 540f643ae7d..f07b1257662 100644 --- a/crates/hir-def/src/data/adt.rs +++ b/crates/hir-def/src/data/adt.rs @@ -40,7 +40,7 @@ pub struct StructData { } bitflags! { - #[derive(Debug, Clone, PartialEq, Eq)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct StructFlags: u8 { const NO_FLAGS = 0; /// Indicates whether the struct is `PhantomData`. 
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 5039d51d70c..49393f05a1a 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -10,9 +10,10 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; use base_db::CrateId; use hir_def::{ + data::adt::StructFlags, hir::Movability, lang_item::{LangItem, LangItemTarget}, - AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, + AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId, }; use hir_expand::name::name; @@ -159,6 +160,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { debug!("impls_for_trait returned {} impls", result.len()); result } + fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind) -> bool { debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind); @@ -183,22 +185,22 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { (TyKind::Adt(id_a, _), TyKind::Adt(id_b, _)) => id_a == id_b, (TyKind::AssociatedType(id_a, _), TyKind::AssociatedType(id_b, _)) => id_a == id_b, (TyKind::Scalar(scalar_a), TyKind::Scalar(scalar_b)) => scalar_a == scalar_b, - (TyKind::Str, TyKind::Str) => true, + (TyKind::Error, TyKind::Error) + | (TyKind::Str, TyKind::Str) + | (TyKind::Slice(_), TyKind::Slice(_)) + | (TyKind::Never, TyKind::Never) + | (TyKind::Array(_, _), TyKind::Array(_, _)) => true, (TyKind::Tuple(arity_a, _), TyKind::Tuple(arity_b, _)) => arity_a == arity_b, (TyKind::OpaqueType(id_a, _), TyKind::OpaqueType(id_b, _)) => id_a == id_b, - (TyKind::Slice(_), TyKind::Slice(_)) => true, (TyKind::FnDef(id_a, _), TyKind::FnDef(id_b, _)) => id_a == id_b, - (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _)) => id_a == id_b, - (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b, - (TyKind::Never, TyKind::Never) => true, - (TyKind::Array(_, _), TyKind::Array(_, _)) => true, + (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _)) + | (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b, (TyKind::Closure(id_a, _), TyKind::Closure(id_b, _)) => id_a == id_b, - (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _)) => id_a == id_b, - (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => { + (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _)) + | (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => { id_a == id_b } (TyKind::Foreign(id_a), TyKind::Foreign(id_b)) => id_a == id_b, - (TyKind::Error, TyKind::Error) => true, (_, _) => false, } }; @@ -653,7 +655,7 @@ pub(crate) fn trait_datum_query( coinductive: false, // only relevant for Chalk testing // FIXME: set these flags correctly marker: false, - fundamental: false, + fundamental: trait_data.fundamental, }; let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect(); @@ -715,33 +717,44 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { pub(crate) fn adt_datum_query( db: &dyn HirDatabase, krate: CrateId, - adt_id: AdtId, + chalk_ir::AdtId(adt_id): AdtId, ) -> Arc { debug!("adt_datum {:?}", adt_id); - let chalk_ir::AdtId(adt_id) = adt_id; let generic_params = generics(db.upcast(), adt_id.into()); let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst); - let phantom_data_id = db - .lang_item(krate, LangItem::PhantomData) - 
.and_then(|item| item.as_struct()) - .map(|item| item.into()); + let (fundamental, phantom_data) = match adt_id { + hir_def::AdtId::StructId(s) => { + let flags = db.struct_data(s).flags; + ( + flags.contains(StructFlags::IS_FUNDAMENTAL), + flags.contains(StructFlags::IS_PHANTOM_DATA), + ) + } + // FIXME set fundamental flags correctly + hir_def::AdtId::UnionId(_) => (false, false), + hir_def::AdtId::EnumId(_) => (false, false), + }; let flags = rust_ir::AdtFlags { upstream: adt_id.module(db.upcast()).krate() != krate, - // FIXME set fundamental flags correctly - fundamental: false, - phantom_data: phantom_data_id == Some(adt_id), + fundamental, + phantom_data, }; - let variant_id_to_fields = |id| { - let field_types = db.field_types(id); - let fields = id - .variant_data(db.upcast()) - .fields() - .iter() - .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) - .collect(); + let variant_id_to_fields = |id: VariantId| { + let variant_data = &id.variant_data(db.upcast()); + let fields = if variant_data.fields().is_empty() { + vec![] + } else { + let field_types = db.field_types(id); + variant_data + .fields() + .iter() + .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) + .filter(|it| !it.contains_unknown()) + .collect() + }; rust_ir::AdtVariantDatum { fields } }; From d33d8675d0fff6f9652790baa5346ac478e528ea Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Wed, 14 Feb 2024 20:12:05 +1100 Subject: [PATCH 08/92] Add `ErrorGuaranteed` to `ast::LitKind::Err`, `token::LitKind::Err`. This mostly works well, and eliminates a couple of delayed bugs. One annoying thing is that we should really also add an `ErrorGuaranteed` to `proc_macro::bridge::LitKind::Err`. But that's difficult because `proc_macro` doesn't have access to `ErrorGuaranteed`, so we have to fake it. 
--- crates/proc-macro-srv/src/server/rust_analyzer_span.rs | 4 ++-- crates/proc-macro-srv/src/server/token_id.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index c7c7bea9941..8a9d52a37a2 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -72,7 +72,7 @@ impl server::FreeFunctions for RaSpanServer { ) -> Result, ()> { // FIXME: keep track of LitKind and Suffix Ok(bridge::Literal { - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, s), suffix: None, span: self.call_site, @@ -202,7 +202,7 @@ impl server::TokenStream for RaSpanServer { tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { // FIXME: handle literal kinds - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, &lit.text), // FIXME: handle suffixes suffix: None, diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index edbdc67b482..15a9e0deae4 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -64,7 +64,7 @@ impl server::FreeFunctions for TokenIdServer { ) -> Result, ()> { // FIXME: keep track of LitKind and Suffix Ok(bridge::Literal { - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, s), suffix: None, span: self.call_site, @@ -187,7 +187,7 @@ impl server::TokenStream for TokenIdServer { tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { // FIXME: handle literal kinds - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, &lit.text), // FIXME: handle suffixes suffix: None, From e3450ad19b6b5a6a67ba5bfdae53ac826953ba2a Mon Sep 17 00:00:00 2001 From: Matt Harding Date: Sun, 12 Nov 2023 05:58:42 +0000 Subject: [PATCH 09/92] Fix bootstrap issue with git on MSYS src/bootstrap runs git to find the root of the repository, but this can go awry when building in MSYS for the mingw target. This is because MSYS git returns a unix-y path, but bootstrap requires a Windows-y path. --- src/bootstrap/src/core/config/config.rs | 31 ++++++++++++++++--------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index c0dd1e12084..6fba6b20ace 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -1227,12 +1227,16 @@ impl Config { // Infer the rest of the configuration. // Infer the source directory. This is non-trivial because we want to support a downloaded bootstrap binary, - // running on a completely machine from where it was compiled. + // running on a completely different machine from where it was compiled. let mut cmd = Command::new("git"); - // NOTE: we cannot support running from outside the repository because the only path we have available - // is set at compile time, which can be wrong if bootstrap was downloaded from source. + // NOTE: we cannot support running from outside the repository because the only other path we have available + // is set at compile time, which can be wrong if bootstrap was downloaded rather than compiled locally. 
// We still support running outside the repository if we find we aren't in a git directory. - cmd.arg("rev-parse").arg("--show-toplevel"); + + // NOTE: We get a relative path from git to work around an issue on MSYS/mingw. If we used an absolute path, + // and end up using MSYS's git rather than git-for-windows, we would get a unix-y MSYS path. But as bootstrap + // has already been (kinda-cross-)compiled to Windows land, we require a normal Windows path. + cmd.arg("rev-parse").arg("--show-cdup"); // Discard stderr because we expect this to fail when building from a tarball. let output = cmd .stderr(std::process::Stdio::null()) @@ -1240,15 +1244,20 @@ impl Config { .ok() .and_then(|output| if output.status.success() { Some(output) } else { None }); if let Some(output) = output { - let git_root = String::from_utf8(output.stdout).unwrap(); - // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes. - let git_root = PathBuf::from(git_root.trim()).canonicalize().unwrap(); + let git_root_relative = String::from_utf8(output.stdout).unwrap(); + // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes, + // and to resolve any relative components. + let git_root = env::current_dir() + .unwrap() + .join(PathBuf::from(git_root_relative.trim())) + .canonicalize() + .unwrap(); let s = git_root.to_str().unwrap(); // Bootstrap is quite bad at handling /? in front of paths - let src = match s.strip_prefix("\\\\?\\") { + let git_root = match s.strip_prefix("\\\\?\\") { Some(p) => PathBuf::from(p), - None => PathBuf::from(git_root), + None => git_root, }; // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when, // for example, the build directory is inside of another unrelated git directory. @@ -1256,8 +1265,8 @@ impl Config { // // NOTE: this implies that downloadable bootstrap isn't supported when the build directory is outside // the source directory. We could fix that by setting a variable from all three of python, ./x, and x.ps1. - if src.join("src").join("stage0.json").exists() { - config.src = src; + if git_root.join("src").join("stage0.json").exists() { + config.src = git_root; } } else { // We're building from a tarball, not git sources. From d846586bc9e0c056a92f8394b6209782238dc5e2 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 1 Feb 2024 20:38:42 -0500 Subject: [PATCH 10/92] fix: Support multiple tab stops in completions in VSCode Uses the native VSCode support for `SnippetTextEdit`s, but in a semi-hacky way as it's not fully supported yet. 
--- editors/code/src/commands.ts | 66 ++++++++++++++++++++++++-- editors/code/src/snippets.ts | 89 +++++++++++++++--------------------- 2 files changed, 99 insertions(+), 56 deletions(-) diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 3d33d255ad4..849fae5cf24 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -4,7 +4,11 @@ import * as ra from "./lsp_ext"; import * as path from "path"; import type { Ctx, Cmd, CtxInit } from "./ctx"; -import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets"; +import { + applySnippetWorkspaceEdit, + applySnippetTextEdits, + type SnippetTextDocumentEdit, +} from "./snippets"; import { spawnSync } from "child_process"; import { type RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run"; import { AstInspector } from "./ast_inspector"; @@ -1006,7 +1010,6 @@ export function resolveCodeAction(ctx: CtxInit): Cmd { return; } const itemEdit = item.edit; - const edit = await client.protocol2CodeConverter.asWorkspaceEdit(itemEdit); // filter out all text edits and recreate the WorkspaceEdit without them so we can apply // snippet edits on our own const lcFileSystemEdit = { @@ -1017,16 +1020,71 @@ export function resolveCodeAction(ctx: CtxInit): Cmd { lcFileSystemEdit, ); await vscode.workspace.applyEdit(fileSystemEdit); - await applySnippetWorkspaceEdit(edit); + + // replace all text edits so that we can convert snippet text edits into `vscode.SnippetTextEdit`s + // FIXME: this is a workaround until vscode-languageclient supports doing the SnippeTextEdit conversion itself + // also need to carry the snippetTextDocumentEdits separately, since we can't retrieve them again using WorkspaceEdit.entries + const [workspaceTextEdit, snippetTextDocumentEdits] = asWorkspaceSnippetEdit(ctx, itemEdit); + await applySnippetWorkspaceEdit(workspaceTextEdit, snippetTextDocumentEdits); if (item.command != null) { await vscode.commands.executeCommand(item.command.command, item.command.arguments); } }; } +function asWorkspaceSnippetEdit( + ctx: CtxInit, + item: lc.WorkspaceEdit, +): [vscode.WorkspaceEdit, SnippetTextDocumentEdit[]] { + const client = ctx.client; + + // partially borrowed from https://github.com/microsoft/vscode-languageserver-node/blob/295aaa393fda8ecce110c38880a00466b9320e63/client/src/common/protocolConverter.ts#L1060-L1101 + const result = new vscode.WorkspaceEdit(); + + if (item.documentChanges) { + const snippetTextDocumentEdits: SnippetTextDocumentEdit[] = []; + + for (const change of item.documentChanges) { + if (lc.TextDocumentEdit.is(change)) { + const uri = client.protocol2CodeConverter.asUri(change.textDocument.uri); + const snippetTextEdits: (vscode.TextEdit | vscode.SnippetTextEdit)[] = []; + + for (const edit of change.edits) { + if ( + "insertTextFormat" in edit && + edit.insertTextFormat === lc.InsertTextFormat.Snippet + ) { + // is a snippet text edit + snippetTextEdits.push( + new vscode.SnippetTextEdit( + client.protocol2CodeConverter.asRange(edit.range), + new vscode.SnippetString(edit.newText), + ), + ); + } else { + // always as a text document edit + snippetTextEdits.push( + vscode.TextEdit.replace( + client.protocol2CodeConverter.asRange(edit.range), + edit.newText, + ), + ); + } + } + + snippetTextDocumentEdits.push([uri, snippetTextEdits]); + } + } + return [result, snippetTextDocumentEdits]; + } else { + // we don't handle WorkspaceEdit.changes since it's not relevant for code actions + return [result, []]; + } +} + export function 
applySnippetWorkspaceEditCommand(_ctx: CtxInit): Cmd { return async (edit: vscode.WorkspaceEdit) => { - await applySnippetWorkspaceEdit(edit); + await applySnippetWorkspaceEdit(edit, edit.entries()); }; } diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts index d81765649ff..fb12125bcd8 100644 --- a/editors/code/src/snippets.ts +++ b/editors/code/src/snippets.ts @@ -3,20 +3,28 @@ import * as vscode from "vscode"; import { assert } from "./util"; import { unwrapUndefinable } from "./undefinable"; -export async function applySnippetWorkspaceEdit(edit: vscode.WorkspaceEdit) { - if (edit.entries().length === 1) { - const [uri, edits] = unwrapUndefinable(edit.entries()[0]); +export type SnippetTextDocumentEdit = [vscode.Uri, (vscode.TextEdit | vscode.SnippetTextEdit)[]]; + +export async function applySnippetWorkspaceEdit( + edit: vscode.WorkspaceEdit, + editEntries: SnippetTextDocumentEdit[], +) { + if (editEntries.length === 1) { + const [uri, edits] = unwrapUndefinable(editEntries[0]); const editor = await editorFromUri(uri); - if (editor) await applySnippetTextEdits(editor, edits); + if (editor) { + edit.set(uri, edits); + await vscode.workspace.applyEdit(edit); + } return; } - for (const [uri, edits] of edit.entries()) { + for (const [uri, edits] of editEntries) { const editor = await editorFromUri(uri); if (editor) { await editor.edit((builder) => { for (const indel of edits) { assert( - !parseSnippet(indel.newText), + !(indel instanceof vscode.SnippetTextEdit), `bad ws edit: snippet received with multiple edits: ${JSON.stringify( edit, )}`, @@ -39,53 +47,30 @@ async function editorFromUri(uri: vscode.Uri): Promise { - for (const indel of edits) { - const parsed = parseSnippet(indel.newText); - if (parsed) { - const [newText, [placeholderStart, placeholderLength]] = parsed; - const prefix = newText.substr(0, placeholderStart); - const lastNewline = prefix.lastIndexOf("\n"); + const edit = new vscode.WorkspaceEdit(); + edit.set(editor.document.uri, toSnippetTextEdits(edits)); + await vscode.workspace.applyEdit(edit); +} - const startLine = indel.range.start.line + lineDelta + countLines(prefix); - const startColumn = - lastNewline === -1 - ? indel.range.start.character + placeholderStart - : prefix.length - lastNewline - 1; - const endColumn = startColumn + placeholderLength; - selections.push( - new vscode.Selection( - new vscode.Position(startLine, startColumn), - new vscode.Position(startLine, endColumn), - ), - ); - builder.replace(indel.range, newText); - } else { - builder.replace(indel.range, indel.newText); - } - lineDelta += - countLines(indel.newText) - (indel.range.end.line - indel.range.start.line); +function hasSnippet(snip: string): boolean { + const m = snip.match(/\$\d+|\{\d+:[^}]*\}/); + return m != null; +} + +function toSnippetTextEdits( + edits: vscode.TextEdit[], +): (vscode.TextEdit | vscode.SnippetTextEdit)[] { + return edits.map((textEdit) => { + // Note: text edits without any snippets are returned as-is instead of + // being wrapped in a SnippetTextEdit, as otherwise it would be + // treated as if it had a tab stop at the end. 
+ if (hasSnippet(textEdit.newText)) { + return new vscode.SnippetTextEdit( + textEdit.range, + new vscode.SnippetString(textEdit.newText), + ); + } else { + return textEdit; } }); - if (selections.length > 0) editor.selections = selections; - if (selections.length === 1) { - const selection = unwrapUndefinable(selections[0]); - editor.revealRange(selection, vscode.TextEditorRevealType.InCenterIfOutsideViewport); - } -} - -function parseSnippet(snip: string): [string, [number, number]] | undefined { - const m = snip.match(/\$(0|\{0:([^}]*)\})/); - if (!m) return undefined; - const placeholder = m[2] ?? ""; - if (m.index == null) return undefined; - const range: [number, number] = [m.index, placeholder.length]; - const insert = snip.replace(m[0], placeholder); - return [insert, range]; -} - -function countLines(text: string): number { - return (text.match(/\n/g) || []).length; } From bcf14e27ce9d4331f4685fb759cf135a76ad119c Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 18:39:17 -0500 Subject: [PATCH 11/92] Work around snippet edits doubling up extra indentation We can't tell vscode to not add in the extra indentation, so we instead opt to remove it from the edits themselves, and then let vscode add it back in. --- editors/code/src/snippets.ts | 71 +++++++++++++++++++++++++++++++++++- 1 file changed, 69 insertions(+), 2 deletions(-) diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts index fb12125bcd8..b3982bdf2be 100644 --- a/editors/code/src/snippets.ts +++ b/editors/code/src/snippets.ts @@ -13,7 +13,7 @@ export async function applySnippetWorkspaceEdit( const [uri, edits] = unwrapUndefinable(editEntries[0]); const editor = await editorFromUri(uri); if (editor) { - edit.set(uri, edits); + edit.set(uri, removeLeadingWhitespace(editor, edits)); await vscode.workspace.applyEdit(edit); } return; @@ -48,7 +48,8 @@ async function editorFromUri(uri: vscode.Uri): Promise { + if (edit instanceof vscode.SnippetTextEdit) { + const snippetEdit: vscode.SnippetTextEdit = edit; + const firstLineEnd = snippetEdit.snippet.value.indexOf("\n"); + + if (firstLineEnd !== -1) { + // Is a multi-line snippet, remove the indentation which + // would be added back in by vscode. 
+ const startLine = editor.document.lineAt(snippetEdit.range.start.line); + const leadingWhitespace = getLeadingWhitespace( + startLine.text, + 0, + startLine.firstNonWhitespaceCharacterIndex, + ); + + const [firstLine, rest] = splitAt(snippetEdit.snippet.value, firstLineEnd + 1); + const unindentedLines = rest + .split("\n") + .map((line) => line.replace(leadingWhitespace, "")) + .join("\n"); + + snippetEdit.snippet.value = firstLine + unindentedLines; + } + + return snippetEdit; + } else { + return edit; + } + }); +} + +// based on https://github.com/microsoft/vscode/blob/main/src/vs/base/common/strings.ts#L284 +function getLeadingWhitespace(str: string, start: number = 0, end: number = str.length): string { + for (let i = start; i < end; i++) { + const chCode = str.charCodeAt(i); + if (chCode !== " ".charCodeAt(0) && chCode !== " ".charCodeAt(0)) { + return str.substring(start, i); + } + } + return str.substring(start, end); +} + +function splitAt(str: string, index: number): [string, string] { + return [str.substring(0, index), str.substring(index)]; +} From 80459c14a4a6d9613d2bc07f5f0df905e8f99a0d Mon Sep 17 00:00:00 2001 From: Matt Harding Date: Tue, 13 Feb 2024 04:37:31 +0000 Subject: [PATCH 12/92] Changes to CI related to mingw and MSYS --- .github/workflows/ci.yml | 33 ++++++++++++++++++++++++++++ src/ci/github-actions/ci.yml | 21 ++++++++++++++++++ src/ci/run.sh | 2 +- src/ci/scripts/install-clang.sh | 3 +-- src/ci/scripts/install-mingw.sh | 10 ++++----- src/ci/scripts/install-msys2.sh | 38 +++++++++++++++++++++++++++------ src/ci/shared.sh | 4 ++++ 7 files changed, 96 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 26e589c092e..464fd3b5640 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,9 +65,20 @@ jobs: - name: x86_64-gnu-tools os: ubuntu-20.04-16core-64gb env: {} + defaults: + run: + shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" timeout-minutes: 600 runs-on: "${{ matrix.os }}" steps: + - if: "contains(matrix.os, 'windows')" + uses: msys2/setup-msys2@v2.22.0 + with: + msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}" + update: false + release: true + path-type: inherit + install: "make dos2unix diffutils\n" - name: disable git crlf conversion run: git config --global core.autocrlf false - name: checkout the source code @@ -459,9 +470,20 @@ jobs: RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --enable-extended --enable-profiler" SCRIPT: python x.py dist bootstrap --include-default-paths os: windows-2019-8core-32gb + defaults: + run: + shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" timeout-minutes: 600 runs-on: "${{ matrix.os }}" steps: + - if: "contains(matrix.os, 'windows')" + uses: msys2/setup-msys2@v2.22.0 + with: + msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}" + update: false + release: true + path-type: inherit + install: "make dos2unix diffutils\n" - name: disable git crlf conversion run: git config --global core.autocrlf false - name: checkout the source code @@ -587,9 +609,20 @@ jobs: env: CODEGEN_BACKENDS: "llvm,cranelift" os: ubuntu-20.04-16core-64gb + defaults: + run: + shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" timeout-minutes: 600 runs-on: "${{ matrix.os }}" steps: + - if: "contains(matrix.os, 'windows')" + uses: msys2/setup-msys2@v2.22.0 + with: + msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}" + update: false + release: 
true + path-type: inherit + install: "make dos2unix diffutils\n" - name: disable git crlf conversion run: git config --global core.autocrlf false - name: checkout the source code diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 43e48c01176..51d9dea5a0b 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -111,10 +111,31 @@ x--expand-yaml-anchors--remove: if: success() && !env.SKIP_JOB - &base-ci-job + defaults: + run: + shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }} timeout-minutes: 600 runs-on: "${{ matrix.os }}" env: *shared-ci-variables steps: + - if: contains(matrix.os, 'windows') + uses: msys2/setup-msys2@v2.22.0 + with: + # i686 jobs use mingw32. x86_64 and cross-compile jobs use mingw64. + msystem: ${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }} + # don't try to download updates for already installed packages + update: false + # don't try to use the msys that comes built-in to the github runner, + # so we can control what is installed (i.e. not python) + release: true + # Inherit the full path from the Windows environment, with MSYS2's */bin/ + # dirs placed in front. This lets us run Windows-native Python etc. + path-type: inherit + install: > + make + dos2unix + diffutils + - name: disable git crlf conversion run: git config --global core.autocrlf false diff --git a/src/ci/run.sh b/src/ci/run.sh index 1cdcffc1a75..3ad04c73d3d 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -76,7 +76,7 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set dist.compression-profile=balance # the LLVM build, as not to run out of memory. # This is an attempt to fix the spurious build error tracked by # https://github.com/rust-lang/rust/issues/108227. -if isWindows && [[ ${CUSTOM_MINGW-0} -eq 1 ]]; then +if isKnownToBeMingwBuild; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.link-jobs=1" fi diff --git a/src/ci/scripts/install-clang.sh b/src/ci/scripts/install-clang.sh index 77164ed4117..aa7ff813f51 100755 --- a/src/ci/scripts/install-clang.sh +++ b/src/ci/scripts/install-clang.sh @@ -37,8 +37,7 @@ if isMacOS; then # Configure `AR` specifically so rustbuild doesn't try to infer it as # `clang-ar` by accident. ciCommandSetEnv AR "ar" -elif isWindows && [[ ${CUSTOM_MINGW-0} -ne 1 ]]; then - +elif isWindows && ! isKnownToBeMingwBuild; then # If we're compiling for MSVC then we, like most other distribution builders, # switch to clang as the compiler. This'll allow us eventually to enable LTO # amongst LLVM and rustc. Note that we only do this on MSVC as I don't think diff --git a/src/ci/scripts/install-mingw.sh b/src/ci/scripts/install-mingw.sh index 7eccb9b8650..87b835b63db 100755 --- a/src/ci/scripts/install-mingw.sh +++ b/src/ci/scripts/install-mingw.sh @@ -38,11 +38,11 @@ if isWindows; then ;; esac - if [[ "${CUSTOM_MINGW-0}" -ne 1 ]]; then - pacman -S --noconfirm --needed mingw-w64-$arch-toolchain mingw-w64-$arch-cmake \ - mingw-w64-$arch-gcc \ - mingw-w64-$arch-python # the python package is actually for python3 - ciCommandAddPath "$(ciCheckoutPath)/msys2/mingw${bits}/bin" + if [[ "${CUSTOM_MINGW:-0}" == 0 ]]; then + pacboy -S --noconfirm toolchain:p + # According to the comment in the Windows part of install-clang.sh, in the future we might + # want to do this instead: + # pacboy -S --noconfirm clang:p ... 
else mingw_dir="mingw${bits}" diff --git a/src/ci/scripts/install-msys2.sh b/src/ci/scripts/install-msys2.sh index 0aa4b42a6a8..905edf38a09 100755 --- a/src/ci/scripts/install-msys2.sh +++ b/src/ci/scripts/install-msys2.sh @@ -1,17 +1,12 @@ #!/bin/bash -# Download and install MSYS2, needed primarily for the test suite (run-make) but -# also used by the MinGW toolchain for assembling things. +# Clean up and prepare the MSYS2 installation. MSYS2 is needed primarily for +# the test suite (run-make), but is also used by the MinGW toolchain for assembling things. set -euo pipefail IFS=$'\n\t' source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" - if isWindows; then - msys2Path="c:/msys64" - mkdir -p "${msys2Path}/home/${USERNAME}" - ciCommandAddPath "${msys2Path}/usr/bin" - # Detect the native Python version installed on the agent. On GitHub # Actions, the C:\hostedtoolcache\windows\Python directory contains a # subdirectory for each installed Python version. @@ -29,4 +24,33 @@ if isWindows; then fi ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64" ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64\\Scripts" + + # Install pacboy for easily installing packages + pacman -S --noconfirm pactoys + + # Delete these pre-installed tools so we can't accidentally use them, because we are using the + # MSYS2 setup action versions instead. + # Delete pre-installed version of MSYS2 + rm -r "/c/msys64/" + # Delete Strawberry Perl, which contains a version of mingw + rm -r "/c/Strawberry/" + # Delete these other copies of mingw, I don't even know where they come from. + rm -r "/c/mingw64/" + rm -r "/c/mingw32/" + + if isKnownToBeMingwBuild; then + # Use the mingw version of CMake for mingw builds. + # However, the MSVC build needs native CMake, as it fails with the mingw one. + # Delete native CMake + rm -r "/c/Program Files/CMake/" + # Install mingw-w64-$arch-cmake + pacboy -S --noconfirm cmake:p + + # We use Git-for-Windows for MSVC builds, and MSYS2 Git for mingw builds, + # so that both are tested. + # Delete Windows-Git + rm -r "/c/Program Files/Git/" + # Install MSYS2 git + pacman -S --noconfirm git + fi fi diff --git a/src/ci/shared.sh b/src/ci/shared.sh index 720394af249..2b0a10e4d08 100644 --- a/src/ci/shared.sh +++ b/src/ci/shared.sh @@ -52,6 +52,10 @@ function isLinux { [[ "${OSTYPE}" = "linux-gnu" ]] } +function isKnownToBeMingwBuild { + isGitHubActions && [[ "${CI_JOB_NAME}" == *mingw ]] +} + function isCiBranch { if [[ $# -ne 1 ]]; then echo "usage: $0 " From e27c4722d364e0367a436f7d89c493eeb8ccf8ee Mon Sep 17 00:00:00 2001 From: Matt Harding Date: Tue, 13 Feb 2024 06:11:42 +0000 Subject: [PATCH 13/92] Update INSTALL.md instructions for MinGW --- INSTALL.md | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/INSTALL.md b/INSTALL.md index b872d317e36..d7e0fd72044 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -145,10 +145,9 @@ toolchain. 1. Download the latest [MSYS2 installer][msys2] and go through the installer. -2. Run `mingw32_shell.bat` or `mingw64_shell.bat` from the MSYS2 installation - directory (e.g. `C:\msys64`), depending on whether you want 32-bit or 64-bit - Rust. (As of the latest version of MSYS2 you have to run `msys2_shell.cmd - -mingw32` or `msys2_shell.cmd -mingw64` from the command line instead.) +2. 
Start a MINGW64 or MINGW32 shell (depending on whether you want 32-bit + or 64-bit Rust) either from your start menu, or by running `mingw64.exe` + or `mingw32.exe` from your MSYS2 installation directory (e.g. `C:\msys64`). 3. From this terminal, install the required tools: @@ -157,8 +156,7 @@ toolchain. pacman -Sy pacman-mirrors # Install build tools needed for Rust. If you're building a 32-bit compiler, - # then replace "x86_64" below with "i686". If you've already got Git, Python, - # or CMake installed and in PATH you can remove them from this list. + # then replace "x86_64" below with "i686". # Note that it is important that you do **not** use the 'python2', 'cmake', # and 'ninja' packages from the 'msys2' subsystem. # The build has historically been known to fail with these packages. @@ -175,9 +173,21 @@ toolchain. 4. Navigate to Rust's source code (or clone it), then build it: ```sh - python x.py setup user && python x.py build && python x.py install + python x.py setup dist && python x.py build && python x.py install ``` +If you want to use the native versions of Git, Python, or CMake you can remove +them from the above pacman command and install them from another source. Make +sure that they're in your Windows PATH, and edit the relevant `mingw[32|64].ini` +file in your MSYS2 installation directory by uncommenting the line +`MSYS2_PATH_TYPE=inherit` to include them in your MSYS2 PATH. + +Using Windows native Python can be helpful if you get errors when building LLVM. +You may also want to use Git for Windows, as it is often *much* faster. Turning +off real-time protection in the Windows Virus & Threat protections settings can +also help with long run times (although note that it will automatically turn +itself back on after some time). + ### MSVC MSVC builds of Rust additionally require an installation of Visual Studio 2017 From c1144436f69fb3c235dd0dbf90f1833bec830856 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sun, 28 Jan 2024 13:50:06 +0100 Subject: [PATCH 14/92] Make synstructure underscore_const(true) the default since otherwise it will trigger the non_local_definitions lint --- compiler/rustc_macros/src/diagnostics/mod.rs | 9 ++++++--- compiler/rustc_macros/src/hash_stable.rs | 2 ++ compiler/rustc_macros/src/lift.rs | 1 + compiler/rustc_macros/src/serialize.rs | 12 +++++++++++- compiler/rustc_macros/src/type_foldable.rs | 2 ++ compiler/rustc_macros/src/type_visitable.rs | 2 ++ 6 files changed, 24 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_macros/src/diagnostics/mod.rs b/compiler/rustc_macros/src/diagnostics/mod.rs index 33dffe6998a..044bbadf41c 100644 --- a/compiler/rustc_macros/src/diagnostics/mod.rs +++ b/compiler/rustc_macros/src/diagnostics/mod.rs @@ -55,7 +55,8 @@ use synstructure::Structure; /// /// See rustc dev guide for more examples on using the `#[derive(Diagnostic)]`: /// -pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn session_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); DiagnosticDerive::new(s).into_tokens() } @@ -101,7 +102,8 @@ pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream { /// /// See rustc dev guide for more examples on using the `#[derive(LintDiagnostic)]`: /// -pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); LintDiagnosticDerive::new(s).into_tokens() } @@ -151,6 +153,7 @@ pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream { /// 
/// diag.subdiagnostic(RawIdentifierSuggestion { span, applicability, ident }); /// ``` -pub fn session_subdiagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn session_subdiagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); SubdiagnosticDeriveBuilder::new().into_tokens(s) } diff --git a/compiler/rustc_macros/src/hash_stable.rs b/compiler/rustc_macros/src/hash_stable.rs index a6396ba687d..6b3210cad7b 100644 --- a/compiler/rustc_macros/src/hash_stable.rs +++ b/compiler/rustc_macros/src/hash_stable.rs @@ -74,6 +74,8 @@ fn hash_stable_derive_with_mode( HashStableMode::Generic | HashStableMode::NoContext => parse_quote!(__CTX), }; + s.underscore_const(true); + // no_context impl is able to derive by-field, which is closer to a perfect derive. s.add_bounds(match mode { HashStableMode::Normal | HashStableMode::Generic => synstructure::AddBounds::Generics, diff --git a/compiler/rustc_macros/src/lift.rs b/compiler/rustc_macros/src/lift.rs index 3dedd88fb19..f7a84ba1510 100644 --- a/compiler/rustc_macros/src/lift.rs +++ b/compiler/rustc_macros/src/lift.rs @@ -4,6 +4,7 @@ use syn::parse_quote; pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { s.add_bounds(synstructure::AddBounds::Generics); s.bind_with(|_| synstructure::BindStyle::Move); + s.underscore_const(true); let tcx: syn::Lifetime = parse_quote!('tcx); let newtcx: syn::GenericParam = parse_quote!('__lifted); diff --git a/compiler/rustc_macros/src/serialize.rs b/compiler/rustc_macros/src/serialize.rs index 98b53945b91..5fa11d22f0e 100644 --- a/compiler/rustc_macros/src/serialize.rs +++ b/compiler/rustc_macros/src/serialize.rs @@ -15,6 +15,7 @@ pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_type_ir::codec::TyDecoder #bound }); s.add_bounds(synstructure::AddBounds::Fields); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -26,6 +27,7 @@ pub fn meta_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! { '__a }); let decoder_ty = quote! { DecodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -34,6 +36,7 @@ pub fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_span::SpanDecoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -42,12 +45,13 @@ pub fn decodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_serialize::Decoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } fn decodable_body( - s: synstructure::Structure<'_>, + mut s: synstructure::Structure<'_>, decoder_ty: TokenStream, ) -> proc_macro2::TokenStream { if let syn::Data::Union(_) = s.ast().data { @@ -93,6 +97,7 @@ fn decodable_body( } } }; + s.underscore_const(true); s.bound_impl( quote!(::rustc_serialize::Decodable<#decoder_ty>), @@ -130,6 +135,7 @@ pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! 
{#encoder_ty: ::rustc_type_ir::codec::TyEncoder #bound }); s.add_bounds(synstructure::AddBounds::Fields); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -141,6 +147,7 @@ pub fn meta_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! { '__a }); let encoder_ty = quote! { EncodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, true) } @@ -149,6 +156,7 @@ pub fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_span::SpanEncoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -157,6 +165,7 @@ pub fn encodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_serialize::Encoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -170,6 +179,7 @@ fn encodable_body( panic!("cannot derive on union") } + s.underscore_const(true); s.bind_with(|binding| { // Handle the lack of a blanket reference impl. if let syn::Type::Reference(_) = binding.ast().ty { diff --git a/compiler/rustc_macros/src/type_foldable.rs b/compiler/rustc_macros/src/type_foldable.rs index 5ee4d879313..5617c53b119 100644 --- a/compiler/rustc_macros/src/type_foldable.rs +++ b/compiler/rustc_macros/src/type_foldable.rs @@ -6,6 +6,8 @@ pub fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:: panic!("cannot derive on union") } + s.underscore_const(true); + if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { s.add_impl_generic(parse_quote! 
{ 'tcx }); } diff --git a/compiler/rustc_macros/src/type_visitable.rs b/compiler/rustc_macros/src/type_visitable.rs index dcd505a105e..c8430380345 100644 --- a/compiler/rustc_macros/src/type_visitable.rs +++ b/compiler/rustc_macros/src/type_visitable.rs @@ -6,6 +6,8 @@ pub fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: panic!("cannot derive on union") } + s.underscore_const(true); + // ignore fields with #[type_visitable(ignore)] s.filter(|bi| { let mut ignored = false; From 61703943132eb4a509646bf63e0199268c6df4a2 Mon Sep 17 00:00:00 2001 From: Urgau Date: Fri, 26 Jan 2024 15:25:18 +0100 Subject: [PATCH 15/92] Implement RFC3373 non local definitions lint --- Cargo.lock | 1 + compiler/rustc_lint/Cargo.toml | 1 + compiler/rustc_lint/messages.ftl | 20 + compiler/rustc_lint/src/lib.rs | 3 + compiler/rustc_lint/src/lints.rs | 17 + compiler/rustc_lint/src/non_local_def.rs | 187 ++++++ tests/ui/lint/non_local_definitions.rs | 373 +++++++++++ tests/ui/lint/non_local_definitions.stderr | 611 ++++++++++++++++++ tests/ui/proc-macro/nested-macro-rules.rs | 1 + tests/ui/proc-macro/nested-macro-rules.stderr | 27 + 10 files changed, 1241 insertions(+) create mode 100644 compiler/rustc_lint/src/non_local_def.rs create mode 100644 tests/ui/lint/non_local_definitions.rs create mode 100644 tests/ui/lint/non_local_definitions.stderr create mode 100644 tests/ui/proc-macro/nested-macro-rules.stderr diff --git a/Cargo.lock b/Cargo.lock index f9ad78e3795..327186bc9b1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4164,6 +4164,7 @@ dependencies = [ "rustc_target", "rustc_trait_selection", "rustc_type_ir", + "smallvec", "tracing", "unicode-security", ] diff --git a/compiler/rustc_lint/Cargo.toml b/compiler/rustc_lint/Cargo.toml index fa1133e7780..2271321b8bf 100644 --- a/compiler/rustc_lint/Cargo.toml +++ b/compiler/rustc_lint/Cargo.toml @@ -23,6 +23,7 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_type_ir = { path = "../rustc_type_ir" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" unicode-security = "0.1.0" # tidy-alphabetical-end diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 785895e0ab8..4e0ba376b7d 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -411,6 +411,26 @@ lint_non_fmt_panic_unused = } .add_fmt_suggestion = or add a "{"{"}{"}"}" format string to use the message literally +lint_non_local_definitions_deprecation = this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +lint_non_local_definitions_impl = non-local `impl` definition, they should be avoided as they go against expectation + .help = + move this `impl` block outside the of the current {$body_kind_descr} {$depth -> + [one] `{$body_name}` + *[other] `{$body_name}` and up {$depth} bodies + } + .non_local = an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... 
{"}"}`) at top-level module and anon-const at the same nesting as the trait or type + +lint_non_local_definitions_macro_rules = non-local `macro_rules!` definition, they should be avoided as they go against expectation + .help = + remove the `#[macro_export]` or move this `macro_rules!` outside the of the current {$body_kind_descr} {$depth -> + [one] `{$body_name}` + *[other] `{$body_name}` and up {$depth} bodies + } + .non_local = a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... {"}"}`) at top-level module + lint_non_snake_case = {$sort} `{$name}` should have a snake case name .rename_or_convert_suggestion = rename the identifier or convert it to a snake case raw identifier .cannot_convert_note = `{$sc}` cannot be used as a raw identifier diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index 85f9d3bd63e..f6d2b758525 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -70,6 +70,7 @@ mod methods; mod multiple_supertrait_upcastable; mod non_ascii_idents; mod non_fmt_panic; +mod non_local_def; mod nonstandard_style; mod noop_method_call; mod opaque_hidden_inferred_bound; @@ -105,6 +106,7 @@ use methods::*; use multiple_supertrait_upcastable::*; use non_ascii_idents::*; use non_fmt_panic::NonPanicFmt; +use non_local_def::*; use nonstandard_style::*; use noop_method_call::*; use opaque_hidden_inferred_bound::*; @@ -231,6 +233,7 @@ late_lint_methods!( MissingDebugImplementations: MissingDebugImplementations, MissingDoc: MissingDoc, AsyncFnInTrait: AsyncFnInTrait, + NonLocalDefinitions: NonLocalDefinitions::default(), ] ] ); diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index da59ffebdc5..15f158961d0 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1293,6 +1293,23 @@ pub struct SuspiciousDoubleRefCloneDiag<'a> { pub ty: Ty<'a>, } +// non_local_defs.rs +#[derive(LintDiagnostic)] +pub enum NonLocalDefinitionsDiag { + #[diag(lint_non_local_definitions_impl)] + #[help] + #[note(lint_non_local)] + #[note(lint_exception)] + #[note(lint_non_local_definitions_deprecation)] + Impl { depth: u32, body_kind_descr: &'static str, body_name: String }, + #[diag(lint_non_local_definitions_macro_rules)] + #[help] + #[note(lint_non_local)] + #[note(lint_exception)] + #[note(lint_non_local_definitions_deprecation)] + MacroRules { depth: u32, body_kind_descr: &'static str, body_name: String }, +} + // pass_by_value.rs #[derive(LintDiagnostic)] #[diag(lint_pass_by_value)] diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs new file mode 100644 index 00000000000..28bd49f36a0 --- /dev/null +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -0,0 +1,187 @@ +use rustc_hir::{def::DefKind, Body, Item, ItemKind, Path, QPath, TyKind}; +use rustc_span::{def_id::DefId, sym, symbol::kw, MacroKind}; + +use smallvec::{smallvec, SmallVec}; + +use crate::{lints::NonLocalDefinitionsDiag, LateContext, LateLintPass, LintContext}; + +declare_lint! { + /// The `non_local_definitions` lint checks for `impl` blocks and `#[macro_export]` + /// macro inside bodies (functions, enum discriminant, ...). 
+ /// + /// ### Example + /// + /// ```rust + /// trait MyTrait {} + /// struct MyStruct; + /// + /// fn foo() { + /// impl MyTrait for MyStruct {} + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Creating non-local definitions go against expectation and can create discrepancies + /// in tooling. It should be avoided. It may become deny-by-default in edition 2024 + /// and higher, see see the tracking issue . + /// + /// An `impl` definition is non-local if it is nested inside an item and neither + /// the type nor the trait are at the same nesting level as the `impl` block. + /// + /// All nested bodies (functions, enum discriminant, array length, consts) (expect for + /// `const _: Ty = { ... }` in top-level module, which is still undecided) are checked. + pub NON_LOCAL_DEFINITIONS, + Warn, + "checks for non-local definitions", + report_in_external_macro +} + +#[derive(Default)] +pub struct NonLocalDefinitions { + body_depth: u32, +} + +impl_lint_pass!(NonLocalDefinitions => [NON_LOCAL_DEFINITIONS]); + +// FIXME(Urgau): Figure out how to handle modules nested in bodies. +// It's currently not handled by the current logic because modules are not bodies. +// They don't even follow the correct order (check_body -> check_mod -> check_body_post) +// instead check_mod is called after every body has been handled. + +impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { + fn check_body(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) { + self.body_depth += 1; + } + + fn check_body_post(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) { + self.body_depth -= 1; + } + + fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) { + if self.body_depth == 0 { + return; + } + + let parent = cx.tcx.parent(item.owner_id.def_id.into()); + let parent_def_kind = cx.tcx.def_kind(parent); + let parent_opt_item_name = cx.tcx.opt_item_name(parent); + + // Per RFC we (currently) ignore anon-const (`const _: Ty = ...`) in top-level module. + if self.body_depth == 1 + && parent_def_kind == DefKind::Const + && parent_opt_item_name == Some(kw::Underscore) + { + return; + } + + match item.kind { + ItemKind::Impl(impl_) => { + // The RFC states: + // + // > An item nested inside an expression-containing item (through any + // > level of nesting) may not define an impl Trait for Type unless + // > either the **Trait** or the **Type** is also nested inside the + // > same expression-containing item. + // + // To achieve this we get try to get the paths of the _Trait_ and + // _Type_, and we look inside thoses paths to try a find in one + // of them a type whose parent is the same as the impl definition. + // + // If that's the case this means that this impl block declaration + // is using local items and so we don't lint on it. + + // We also ignore anon-const in item by including the anon-const + // parent as well; and since it's quite uncommon, we use smallvec + // to avoid unnecessary heap allocations. 
+ let local_parents: SmallVec<[DefId; 1]> = if parent_def_kind == DefKind::Const + && parent_opt_item_name == Some(kw::Underscore) + { + smallvec![parent, cx.tcx.parent(parent)] + } else { + smallvec![parent] + }; + + let self_ty_has_local_parent = match impl_.self_ty.kind { + TyKind::Path(QPath::Resolved(_, ty_path)) => { + path_has_local_parent(ty_path, cx, &*local_parents) + } + TyKind::TraitObject([principle_poly_trait_ref, ..], _, _) => { + path_has_local_parent( + principle_poly_trait_ref.trait_ref.path, + cx, + &*local_parents, + ) + } + TyKind::TraitObject([], _, _) + | TyKind::InferDelegation(_, _) + | TyKind::Slice(_) + | TyKind::Array(_, _) + | TyKind::Ptr(_) + | TyKind::Ref(_, _) + | TyKind::BareFn(_) + | TyKind::Never + | TyKind::Tup(_) + | TyKind::Path(_) + | TyKind::AnonAdt(_) + | TyKind::OpaqueDef(_, _, _) + | TyKind::Typeof(_) + | TyKind::Infer + | TyKind::Err(_) => false, + }; + + let of_trait_has_local_parent = impl_ + .of_trait + .map(|of_trait| path_has_local_parent(of_trait.path, cx, &*local_parents)) + .unwrap_or(false); + + // If none of them have a local parent (LOGICAL NOR) this means that + // this impl definition is a non-local definition and so we lint on it. + if !(self_ty_has_local_parent || of_trait_has_local_parent) { + cx.emit_span_lint( + NON_LOCAL_DEFINITIONS, + item.span, + NonLocalDefinitionsDiag::Impl { + depth: self.body_depth, + body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent), + body_name: parent_opt_item_name + .map(|s| s.to_ident_string()) + .unwrap_or_else(|| "".to_string()), + }, + ) + } + } + ItemKind::Macro(_macro, MacroKind::Bang) + if cx.tcx.has_attr(item.owner_id.def_id, sym::macro_export) => + { + cx.emit_span_lint( + NON_LOCAL_DEFINITIONS, + item.span, + NonLocalDefinitionsDiag::MacroRules { + depth: self.body_depth, + body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent), + body_name: parent_opt_item_name + .map(|s| s.to_ident_string()) + .unwrap_or_else(|| "".to_string()), + }, + ) + } + _ => {} + } + } +} + +/// Given a path and a parent impl def id, this checks if the if parent resolution +/// def id correspond to the def id of the parent impl definition. 
+/// +/// Given this path, we will look at the path (and ignore any generic args): +/// +/// ```text +/// std::convert::PartialEq> +/// ^^^^^^^^^^^^^^^^^^^^^^^ +/// ``` +fn path_has_local_parent(path: &Path<'_>, cx: &LateContext<'_>, local_parents: &[DefId]) -> bool { + path.res.opt_def_id().is_some_and(|did| local_parents.contains(&cx.tcx.parent(did))) +} diff --git a/tests/ui/lint/non_local_definitions.rs b/tests/ui/lint/non_local_definitions.rs new file mode 100644 index 00000000000..986efbfcf0f --- /dev/null +++ b/tests/ui/lint/non_local_definitions.rs @@ -0,0 +1,373 @@ +//@ check-pass +//@ edition:2021 + +#![feature(inline_const)] + +use std::fmt::{Debug, Display}; + +struct Test; + +impl Debug for Test { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } +} + +mod do_not_lint_mod { + pub trait Tait {} + + impl super::Test { + fn hugo() {} + } + + impl Tait for super::Test {} +} + +trait Uto {} +const Z: () = { + trait Uto1 {} + + impl Uto1 for Test {} // the trait is local, don't lint + + impl Uto for &Test {} + //~^ WARN non-local `impl` definition +}; + +trait Ano {} +const _: () = { + impl Ano for &Test {} // ignored since the parent is an anon-const +}; + +type A = [u32; { + impl Uto for *mut Test {} + //~^ WARN non-local `impl` definition + + 1 +}]; + +enum Enum { + Discr = { + impl Uto for Test {} + //~^ WARN non-local `impl` definition + + 1 + } +} + +trait Uto2 {} +static A: u32 = { + impl Uto2 for Test {} + //~^ WARN non-local `impl` definition + + 1 +}; + +trait Uto3 {} +const B: u32 = { + impl Uto3 for Test {} + //~^ WARN non-local `impl` definition + + #[macro_export] + macro_rules! m0 { () => { } }; + //~^ WARN non-local `macro_rules!` definition + + trait Uto4 {} + impl Uto4 for Test {} + + 1 +}; + +trait Uto5 {} +fn main() { + #[macro_export] + macro_rules! 
m { () => { } }; + //~^ WARN non-local `macro_rules!` definition + + impl Test { + //~^ WARN non-local `impl` definition + fn foo() {} + } + + let _array = [0i32; { + impl Test { + //~^ WARN non-local `impl` definition + fn bar() {} + } + + 1 + }]; + + const { + impl Test { + //~^ WARN non-local `impl` definition + fn hoo() {} + } + + 1 + }; + + const _: u32 = { + impl Test { + //~^ WARN non-local `impl` definition + fn foo2() {} + } + + 1 + }; + + impl Display for Test { + //~^ WARN non-local `impl` definition + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } + } + + impl dyn Uto5 {} + //~^ WARN non-local `impl` definition + + impl Uto5 for Vec { } + //~^ WARN non-local `impl` definition + + impl Uto5 for &dyn Uto5 {} + //~^ WARN non-local `impl` definition + + impl Uto5 for *mut Test {} + //~^ WARN non-local `impl` definition + + impl Uto5 for *mut [Test] {} + //~^ WARN non-local `impl` definition + + impl Uto5 for [Test; 8] {} + //~^ WARN non-local `impl` definition + + impl Uto5 for (Test,) {} + //~^ WARN non-local `impl` definition + + impl Uto5 for fn(Test) -> () {} + //~^ WARN non-local `impl` definition + + impl Uto5 for fn() -> Test {} + //~^ WARN non-local `impl` definition + + let _a = || { + impl Uto5 for Test {} + //~^ WARN non-local `impl` definition + + 1 + }; + + type A = [u32; { + impl Uto5 for &Test {} + //~^ WARN non-local `impl` definition + + 1 + }]; + + fn a(_: [u32; { + impl Uto5 for &(Test,) {} + //~^ WARN non-local `impl` definition + + 1 + }]) {} + + fn b() -> [u32; { + impl Uto5 for &(Test,Test) {} + //~^ WARN non-local `impl` definition + + 1 + }] { todo!() } + + struct InsideMain; + + impl Uto5 for *mut InsideMain {} + //~^ WARN non-local `impl` definition + impl Uto5 for *mut [InsideMain] {} + //~^ WARN non-local `impl` definition + impl Uto5 for [InsideMain; 8] {} + //~^ WARN non-local `impl` definition + impl Uto5 for (InsideMain,) {} + //~^ WARN non-local `impl` definition + impl Uto5 for fn(InsideMain) -> () {} + //~^ WARN non-local `impl` definition + impl Uto5 for fn() -> InsideMain {} + //~^ WARN non-local `impl` definition + + impl Debug for InsideMain { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } + } + + impl InsideMain { + fn foo() {} + } + + fn inside_inside() { + impl Display for InsideMain { + //~^ WARN non-local `impl` definition + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } + } + + impl InsideMain { + //~^ WARN non-local `impl` definition + fn bar() { + #[macro_export] + macro_rules! 
m2 { () => { } }; + //~^ WARN non-local `macro_rules!` definition + } + } + } + + trait Uto6 {} + impl dyn Uto6 {} + impl Uto5 for dyn Uto6 {} + + impl Uto3 for Vec { } + //~^ WARN non-local `impl` definition +} + +trait Uto7 {} +trait Uto8 {} + +fn bad() { + struct Local; + impl Uto7 for Test where Local: std::any::Any {} + //~^ WARN non-local `impl` definition + + impl Uto8 for T {} + //~^ WARN non-local `impl` definition +} + +struct UwU(T); + +fn fun() { + #[derive(Debug)] + struct OwO; + impl Default for UwU { + //~^ WARN non-local `impl` definition + fn default() -> Self { + UwU(OwO) + } + } +} + +struct Cat; + +fn meow() { + impl From for () { + //~^ WARN non-local `impl` definition + fn from(_: Cat) -> () { + todo!() + } + } + + #[derive(Debug)] + struct Cat; + impl AsRef for () { + //~^ WARN non-local `impl` definition + fn as_ref(&self) -> &Cat { &Cat } + } +} + +struct G; + +fn fun2() { + #[derive(Debug, Default)] + struct B; + impl PartialEq for G { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &B) -> bool { + true + } + } +} + +fn side_effects() { + dbg!(().as_ref()); // prints `Cat` + dbg!(UwU::default().0); + let _ = G::eq(&G, dbg!(&<_>::default())); +} + +struct Dog; + +fn woof() { + impl PartialEq for &Dog { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &Dog) -> bool { + todo!() + } + } + + impl PartialEq<()> for Dog { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &()) -> bool { + todo!() + } + } + + impl PartialEq<()> for &Dog { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &()) -> bool { + todo!() + } + } + + impl PartialEq for () { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &Dog) -> bool { + todo!() + } + } + + struct Test; + impl PartialEq for Test { + fn eq(&self, _: &Dog) -> bool { + todo!() + } + } +} + +struct Wrap(T); + +impl Wrap>> {} + +fn rawr() { + struct Lion; + + impl From>> for () { + //~^ WARN non-local `impl` definition + fn from(_: Wrap>) -> Self { + todo!() + } + } + + impl From<()> for Wrap { + //~^ WARN non-local `impl` definition + fn from(_: ()) -> Self { + todo!() + } + } +} + +macro_rules! m { + () => { + trait MacroTrait {} + struct OutsideStruct; + fn my_func() { + impl MacroTrait for OutsideStruct {} + //~^ WARN non-local `impl` definition + } + } +} + +m!(); + +fn bitflags() { + struct Flags; + + const _: () = { + impl Flags {} + }; +} diff --git a/tests/ui/lint/non_local_definitions.stderr b/tests/ui/lint/non_local_definitions.stderr new file mode 100644 index 00000000000..f15457734bc --- /dev/null +++ b/tests/ui/lint/non_local_definitions.stderr @@ -0,0 +1,611 @@ +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:32:5 + | +LL | impl Uto for &Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant `Z` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: `#[warn(non_local_definitions)]` on by default + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:42:5 + | +LL | impl Uto for *mut Test {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:50:9 + | +LL | impl Uto for Test {} + | ^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:59:5 + | +LL | impl Uto2 for Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current static `A` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:67:5 + | +LL | impl Uto3 for Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant `B` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:71:5 + | +LL | macro_rules! m0 { () => { } }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current constant `B` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:83:5 + | +LL | macro_rules! m { () => { } }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current function `main` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:86:5 + | +LL | / impl Test { +LL | | +LL | | fn foo() {} +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:92:9 + | +LL | / impl Test { +LL | | +LL | | fn bar() {} +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:101:9 + | +LL | / impl Test { +LL | | +LL | | fn hoo() {} +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current inline constant `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:110:9 + | +LL | / impl Test { +LL | | +LL | | fn foo2() {} +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current constant `_` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:118:5 + | +LL | / impl Display for Test { +LL | | +LL | | fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:125:5 + | +LL | impl dyn Uto5 {} + | ^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:128:5 + | +LL | impl Uto5 for Vec { } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:131:5 + | +LL | impl Uto5 for &dyn Uto5 {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:134:5 + | +LL | impl Uto5 for *mut Test {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:137:5 + | +LL | impl Uto5 for *mut [Test] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:140:5 + | +LL | impl Uto5 for [Test; 8] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:143:5 + | +LL | impl Uto5 for (Test,) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:146:5 + | +LL | impl Uto5 for fn(Test) -> () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:149:5 + | +LL | impl Uto5 for fn() -> Test {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:153:9 + | +LL | impl Uto5 for Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current closure `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:160:9 + | +LL | impl Uto5 for &Test {} + | ^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:167:9 + | +LL | impl Uto5 for &(Test,) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:174:9 + | +LL | impl Uto5 for &(Test,Test) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:182:5 + | +LL | impl Uto5 for *mut InsideMain {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:184:5 + | +LL | impl Uto5 for *mut [InsideMain] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:186:5 + | +LL | impl Uto5 for [InsideMain; 8] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:188:5 + | +LL | impl Uto5 for (InsideMain,) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:190:5 + | +LL | impl Uto5 for fn(InsideMain) -> () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:192:5 + | +LL | impl Uto5 for fn() -> InsideMain {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:206:9 + | +LL | / impl Display for InsideMain { +LL | | +LL | | fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +LL | | todo!() +LL | | } +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current function `inside_inside` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:213:9 + | +LL | / impl InsideMain { +LL | | +LL | | fn bar() { +LL | | #[macro_export] +... 
| +LL | | } +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current function `inside_inside` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:217:17 + | +LL | macro_rules! m2 { () => { } }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current associated function `bar` and up 3 bodies + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:227:5 + | +LL | impl Uto3 for Vec { } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:236:5 + | +LL | impl Uto7 for Test where Local: std::any::Any {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `bad` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:239:5 + | +LL | impl Uto8 for T {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `bad` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:248:5 + | +LL | / impl Default for UwU { +LL | | +LL | | fn default() -> Self { +LL | | UwU(OwO) +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `fun` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:259:5 + | +LL | / impl From for () { +LL | | +LL | | fn from(_: Cat) -> () { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `meow` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:268:5 + | +LL | / impl AsRef for () { +LL | | +LL | | fn as_ref(&self) -> &Cat { &Cat } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `meow` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:279:5 + | +LL | / impl PartialEq for G { +LL | | +LL | | fn eq(&self, _: &B) -> bool { +LL | | true +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `fun2` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:296:5 + | +LL | / impl PartialEq for &Dog { +LL | | +LL | | fn eq(&self, _: &Dog) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:303:5 + | +LL | / impl PartialEq<()> for Dog { +LL | | +LL | | fn eq(&self, _: &()) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:310:5 + | +LL | / impl PartialEq<()> for &Dog { +LL | | +LL | | fn eq(&self, _: &()) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:317:5 + | +LL | / impl PartialEq for () { +LL | | +LL | | fn eq(&self, _: &Dog) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:339:5 + | +LL | / impl From>> for () { +LL | | +LL | | fn from(_: Wrap>) -> Self { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `rawr` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:346:5 + | +LL | / impl From<()> for Wrap { +LL | | +LL | | fn from(_: ()) -> Self { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `rawr` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:359:13 + | +LL | impl MacroTrait for OutsideStruct {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +... +LL | m!(); + | ---- in this macro invocation + | + = help: move this `impl` block outside the of the current function `my_func` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: this warning originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: 48 warnings emitted + diff --git a/tests/ui/proc-macro/nested-macro-rules.rs b/tests/ui/proc-macro/nested-macro-rules.rs index bb25b97df50..0dce3c408c2 100644 --- a/tests/ui/proc-macro/nested-macro-rules.rs +++ b/tests/ui/proc-macro/nested-macro-rules.rs @@ -19,5 +19,6 @@ fn main() { nested_macro_rules::inner_macro!(print_bang, print_attr); nested_macro_rules::outer_macro!(SecondStruct, SecondAttrStruct); + //~^ WARN non-local `macro_rules!` definition inner_macro!(print_bang, print_attr); } diff --git a/tests/ui/proc-macro/nested-macro-rules.stderr b/tests/ui/proc-macro/nested-macro-rules.stderr new file mode 100644 index 00000000000..111be882771 --- /dev/null +++ b/tests/ui/proc-macro/nested-macro-rules.stderr @@ -0,0 +1,27 @@ +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/auxiliary/nested-macro-rules.rs:7:9 + | +LL | macro_rules! 
outer_macro { + | ------------------------ in this expansion of `nested_macro_rules::outer_macro!` +... +LL | / macro_rules! inner_macro { +LL | | ($bang_macro:ident, $attr_macro:ident) => { +LL | | $bang_macro!($name); +LL | | #[$attr_macro] struct $attr_struct_name {} +LL | | } +LL | | } + | |_________^ + | + ::: $DIR/nested-macro-rules.rs:21:5 + | +LL | nested_macro_rules::outer_macro!(SecondStruct, SecondAttrStruct); + | ---------------------------------------------------------------- in this macro invocation + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current function `main` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: `#[warn(non_local_definitions)]` on by default + +warning: 1 warning emitted + From 80c81c53ace700baf07d7d51c90e55adb2b3ff8f Mon Sep 17 00:00:00 2001 From: Urgau Date: Fri, 26 Jan 2024 17:14:38 +0100 Subject: [PATCH 16/92] Allow newly added non_local_definitions lint in tests --- tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs | 2 ++ tests/ui/const-generics/min_const_generics/macro.rs | 2 ++ tests/ui/consts/const_in_pattern/accept_structural.rs | 1 + tests/ui/drop/dropck-eyepatch-reorder.rs | 1 + tests/ui/drop/dropck-eyepatch.rs | 1 + tests/ui/imports/local-modularized-tricky-pass-2.rs | 1 + tests/ui/issues/issue-31776.rs | 1 + tests/ui/issues/issue-41053.rs | 2 ++ tests/ui/macros/type-macros-simple.rs | 2 ++ tests/ui/packed/issue-46152.rs | 1 + tests/ui/privacy/associated-item-privacy-trait.rs | 2 +- tests/ui/privacy/private-in-public-non-principal.rs | 1 + tests/ui/privacy/private-in-public-non-principal.stderr | 8 ++++---- tests/ui/rust-2018/uniform-paths/issue-55779.rs | 2 ++ 14 files changed, 22 insertions(+), 5 deletions(-) diff --git a/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs b/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs index 28705bfc0c8..1a2aae8fb23 100644 --- a/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs +++ b/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs @@ -5,6 +5,8 @@ //@ edition:2018 +#![allow(non_local_definitions)] + struct A<'a, 'b>(&'a &'b i32); struct B<'a>(&'a i32); diff --git a/tests/ui/const-generics/min_const_generics/macro.rs b/tests/ui/const-generics/min_const_generics/macro.rs index b7e8083a861..52f47628f8f 100644 --- a/tests/ui/const-generics/min_const_generics/macro.rs +++ b/tests/ui/const-generics/min_const_generics/macro.rs @@ -1,4 +1,6 @@ //@ run-pass +#![allow(non_local_definitions)] + struct Example; macro_rules! 
external_macro { diff --git a/tests/ui/consts/const_in_pattern/accept_structural.rs b/tests/ui/consts/const_in_pattern/accept_structural.rs index 09142c56157..31d3b6e7331 100644 --- a/tests/ui/consts/const_in_pattern/accept_structural.rs +++ b/tests/ui/consts/const_in_pattern/accept_structural.rs @@ -1,5 +1,6 @@ //@ run-pass +#![allow(non_local_definitions)] #![warn(indirect_structural_match)] // This test is checking our logic for structural match checking by enumerating diff --git a/tests/ui/drop/dropck-eyepatch-reorder.rs b/tests/ui/drop/dropck-eyepatch-reorder.rs index 6b394414bae..b985beee9ec 100644 --- a/tests/ui/drop/dropck-eyepatch-reorder.rs +++ b/tests/ui/drop/dropck-eyepatch-reorder.rs @@ -1,5 +1,6 @@ //@ run-pass #![feature(dropck_eyepatch)] +#![allow(non_local_definitions)] // The point of this test is to test uses of `#[may_dangle]` attribute // where the formal declaration order (in the impl generics) does not diff --git a/tests/ui/drop/dropck-eyepatch.rs b/tests/ui/drop/dropck-eyepatch.rs index 2f27b72da5a..2dffe6aba17 100644 --- a/tests/ui/drop/dropck-eyepatch.rs +++ b/tests/ui/drop/dropck-eyepatch.rs @@ -1,5 +1,6 @@ //@ run-pass #![feature(dropck_eyepatch)] +#![allow(non_local_definitions)] // The point of this test is to illustrate that the `#[may_dangle]` // attribute specifically allows, in the context of a type diff --git a/tests/ui/imports/local-modularized-tricky-pass-2.rs b/tests/ui/imports/local-modularized-tricky-pass-2.rs index 581bab467f5..1cf97c5aa06 100644 --- a/tests/ui/imports/local-modularized-tricky-pass-2.rs +++ b/tests/ui/imports/local-modularized-tricky-pass-2.rs @@ -4,6 +4,7 @@ // into the root module soon enough to act as usual items and shadow globs and preludes. #![feature(decl_macro)] +#![allow(non_local_definitions)] // `macro_export` shadows globs use inner1::*; diff --git a/tests/ui/issues/issue-31776.rs b/tests/ui/issues/issue-31776.rs index 632defbcf27..4b342a0e3b2 100644 --- a/tests/ui/issues/issue-31776.rs +++ b/tests/ui/issues/issue-31776.rs @@ -1,6 +1,7 @@ //@ run-pass #![allow(dead_code)] #![allow(unused_variables)] +#![allow(non_local_definitions)] // Various scenarios in which `pub` is required in blocks struct S; diff --git a/tests/ui/issues/issue-41053.rs b/tests/ui/issues/issue-41053.rs index f46bf6b4aa1..18f9e209c33 100644 --- a/tests/ui/issues/issue-41053.rs +++ b/tests/ui/issues/issue-41053.rs @@ -1,6 +1,8 @@ //@ run-pass //@ aux-build:issue-41053.rs +#![allow(non_local_definitions)] + pub trait Trait { fn foo(&self) {} } pub struct Foo; diff --git a/tests/ui/macros/type-macros-simple.rs b/tests/ui/macros/type-macros-simple.rs index 4d1001baf59..d189b881f7d 100644 --- a/tests/ui/macros/type-macros-simple.rs +++ b/tests/ui/macros/type-macros-simple.rs @@ -1,6 +1,8 @@ //@ run-pass #![allow(dead_code)] #![allow(unused_variables)] +#![allow(non_local_definitions)] + macro_rules! 
Tuple { { $A:ty,$B:ty } => { ($A, $B) } } diff --git a/tests/ui/packed/issue-46152.rs b/tests/ui/packed/issue-46152.rs index e38b445107b..5b2e4bbfdf2 100644 --- a/tests/ui/packed/issue-46152.rs +++ b/tests/ui/packed/issue-46152.rs @@ -1,6 +1,7 @@ //@ run-pass #![allow(dead_code)] #![allow(unused_variables)] +#![allow(non_local_definitions)] #![feature(unsize, coerce_unsized)] #[repr(packed)] diff --git a/tests/ui/privacy/associated-item-privacy-trait.rs b/tests/ui/privacy/associated-item-privacy-trait.rs index f038ae9e261..655d892e244 100644 --- a/tests/ui/privacy/associated-item-privacy-trait.rs +++ b/tests/ui/privacy/associated-item-privacy-trait.rs @@ -1,5 +1,5 @@ #![feature(decl_macro, associated_type_defaults)] -#![allow(private_interfaces, private_bounds)] +#![allow(private_interfaces, private_bounds, non_local_definitions)] mod priv_trait { trait PrivTr { diff --git a/tests/ui/privacy/private-in-public-non-principal.rs b/tests/ui/privacy/private-in-public-non-principal.rs index e348a181651..8dc90919bc9 100644 --- a/tests/ui/privacy/private-in-public-non-principal.rs +++ b/tests/ui/privacy/private-in-public-non-principal.rs @@ -1,5 +1,6 @@ #![feature(auto_traits)] #![feature(negative_impls)] +#![allow(non_local_definitions)] pub trait PubPrincipal {} auto trait PrivNonPrincipal {} diff --git a/tests/ui/privacy/private-in-public-non-principal.stderr b/tests/ui/privacy/private-in-public-non-principal.stderr index 73f2249bc6c..5aa08d3f071 100644 --- a/tests/ui/privacy/private-in-public-non-principal.stderr +++ b/tests/ui/privacy/private-in-public-non-principal.stderr @@ -1,24 +1,24 @@ warning: trait `PrivNonPrincipal` is more private than the item `leak_dyn_nonprincipal` - --> $DIR/private-in-public-non-principal.rs:7:1 + --> $DIR/private-in-public-non-principal.rs:8:1 | LL | pub fn leak_dyn_nonprincipal() -> Box { loop {} } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ function `leak_dyn_nonprincipal` is reachable at visibility `pub` | note: but trait `PrivNonPrincipal` is only usable at visibility `pub(crate)` - --> $DIR/private-in-public-non-principal.rs:5:1 + --> $DIR/private-in-public-non-principal.rs:6:1 | LL | auto trait PrivNonPrincipal {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: `#[warn(private_interfaces)]` on by default error: missing documentation for an associated function - --> $DIR/private-in-public-non-principal.rs:13:9 + --> $DIR/private-in-public-non-principal.rs:14:9 | LL | pub fn check_doc_lint() {} | ^^^^^^^^^^^^^^^^^^^^^^^ | note: the lint level is defined here - --> $DIR/private-in-public-non-principal.rs:10:8 + --> $DIR/private-in-public-non-principal.rs:11:8 | LL | #[deny(missing_docs)] | ^^^^^^^^^^^^ diff --git a/tests/ui/rust-2018/uniform-paths/issue-55779.rs b/tests/ui/rust-2018/uniform-paths/issue-55779.rs index 350ab324682..246b8dd82c5 100644 --- a/tests/ui/rust-2018/uniform-paths/issue-55779.rs +++ b/tests/ui/rust-2018/uniform-paths/issue-55779.rs @@ -2,6 +2,8 @@ //@ edition:2018 //@ aux-crate:issue_55779_extern_trait=issue-55779-extern-trait.rs +#![allow(non_local_definitions)] + use issue_55779_extern_trait::Trait; struct Local; From 01bcc60ecd8aa4bf7d43dd13694e003c84ad19ee Mon Sep 17 00:00:00 2001 From: Urgau Date: Fri, 26 Jan 2024 18:48:18 +0100 Subject: [PATCH 17/92] Allow newly added non_local_definitions lint in clippy --- .../undocumented_unsafe_blocks.rs | 2 +- .../clippy/tests/ui/bool_comparison.fixed | 2 +- src/tools/clippy/tests/ui/bool_comparison.rs | 2 +- src/tools/clippy/tests/ui/crashes/ice-4760.rs | 2 + 
src/tools/clippy/tests/ui/crashes/ice-6179.rs | 1 + .../tests/ui/explicit_into_iter_loop.fixed | 1 + .../tests/ui/explicit_into_iter_loop.rs | 1 + .../tests/ui/explicit_into_iter_loop.stderr | 12 ++--- .../clippy/tests/ui/explicit_iter_loop.fixed | 3 +- .../clippy/tests/ui/explicit_iter_loop.rs | 3 +- .../clippy/tests/ui/explicit_iter_loop.stderr | 36 ++++++------- .../clippy/tests/ui/from_over_into.fixed | 1 + src/tools/clippy/tests/ui/from_over_into.rs | 1 + .../clippy/tests/ui/from_over_into.stderr | 14 ++--- .../clippy/tests/ui/manual_str_repeat.fixed | 1 + .../clippy/tests/ui/manual_str_repeat.rs | 1 + .../clippy/tests/ui/manual_str_repeat.stderr | 20 +++---- .../clippy/tests/ui/needless_borrow.fixed | 1 + src/tools/clippy/tests/ui/needless_borrow.rs | 1 + .../clippy/tests/ui/needless_borrow.stderr | 54 +++++++++---------- 20 files changed, 86 insertions(+), 73 deletions(-) diff --git a/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs b/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs index a2781398760..8997073c8a5 100644 --- a/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs +++ b/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs @@ -4,7 +4,7 @@ //@[disabled] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/undocumented_unsafe_blocks/disabled #![warn(clippy::undocumented_unsafe_blocks, clippy::unnecessary_safety_comment)] -#![allow(deref_nullptr, clippy::let_unit_value, clippy::missing_safety_doc)] +#![allow(deref_nullptr, non_local_definitions, clippy::let_unit_value, clippy::missing_safety_doc)] #![feature(lint_reasons)] extern crate proc_macro_unsafe; diff --git a/src/tools/clippy/tests/ui/bool_comparison.fixed b/src/tools/clippy/tests/ui/bool_comparison.fixed index 02f1d09b833..600380fd142 100644 --- a/src/tools/clippy/tests/ui/bool_comparison.fixed +++ b/src/tools/clippy/tests/ui/bool_comparison.fixed @@ -1,4 +1,4 @@ -#![allow(clippy::needless_if)] +#![allow(non_local_definitions, clippy::needless_if)] #![warn(clippy::bool_comparison)] #![allow(clippy::non_canonical_partial_ord_impl)] diff --git a/src/tools/clippy/tests/ui/bool_comparison.rs b/src/tools/clippy/tests/ui/bool_comparison.rs index 5ef696d855e..910df6151f8 100644 --- a/src/tools/clippy/tests/ui/bool_comparison.rs +++ b/src/tools/clippy/tests/ui/bool_comparison.rs @@ -1,4 +1,4 @@ -#![allow(clippy::needless_if)] +#![allow(non_local_definitions, clippy::needless_if)] #![warn(clippy::bool_comparison)] #![allow(clippy::non_canonical_partial_ord_impl)] diff --git a/src/tools/clippy/tests/ui/crashes/ice-4760.rs b/src/tools/clippy/tests/ui/crashes/ice-4760.rs index 08b06961760..e1265169762 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-4760.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-4760.rs @@ -1,3 +1,5 @@ +#![allow(non_local_definitions)] + const COUNT: usize = 2; struct Thing; trait Dummy {} diff --git a/src/tools/clippy/tests/ui/crashes/ice-6179.rs b/src/tools/clippy/tests/ui/crashes/ice-6179.rs index ce1895851e2..fffc0f7d0d4 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-6179.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-6179.rs @@ -3,6 +3,7 @@ #![warn(clippy::use_self)] #![allow(dead_code, clippy::let_with_type_underscore)] +#![allow(non_local_definitions)] struct Foo; diff --git a/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed b/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed index 2521bce6a58..6d67488a713 100644 --- 
a/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed +++ b/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::explicit_into_iter_loop)] fn main() { diff --git a/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs b/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs index 9eac96d182b..14630c07c5c 100644 --- a/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs +++ b/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::explicit_into_iter_loop)] fn main() { diff --git a/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr b/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr index c03647ab433..a1e632271ed 100644 --- a/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr +++ b/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr @@ -1,5 +1,5 @@ error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:9:18 + --> $DIR/explicit_into_iter_loop.rs:10:18 | LL | for _ in iterator.into_iter() {} | ^^^^^^^^^^^^^^^^^^^^ help: to write this more concisely, try: `iterator` @@ -8,31 +8,31 @@ LL | for _ in iterator.into_iter() {} = help: to override `-D warnings` add `#[allow(clippy::explicit_into_iter_loop)]` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:22:14 + --> $DIR/explicit_into_iter_loop.rs:23:14 | LL | for _ in t.into_iter() {} | ^^^^^^^^^^^^^ help: to write this more concisely, try: `&t` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:25:14 + --> $DIR/explicit_into_iter_loop.rs:26:14 | LL | for _ in r.into_iter() {} | ^^^^^^^^^^^^^ help: to write this more concisely, try: `r` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:33:14 + --> $DIR/explicit_into_iter_loop.rs:34:14 | LL | for _ in mr.into_iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&*mr` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:45:14 + --> $DIR/explicit_into_iter_loop.rs:46:14 | LL | for _ in u.into_iter() {} | ^^^^^^^^^^^^^ help: to write this more concisely, try: `&mut u` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:48:14 + --> $DIR/explicit_into_iter_loop.rs:49:14 | LL | for _ in mr.into_iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&mut *mr` diff --git a/src/tools/clippy/tests/ui/explicit_iter_loop.fixed b/src/tools/clippy/tests/ui/explicit_iter_loop.fixed index f08397defa5..06229a52a18 100644 --- a/src/tools/clippy/tests/ui/explicit_iter_loop.fixed +++ b/src/tools/clippy/tests/ui/explicit_iter_loop.fixed @@ -5,7 +5,8 @@ clippy::needless_borrow, clippy::deref_addrof, clippy::unnecessary_mut_passed, - dead_code + dead_code, + non_local_definitions, )] use core::slice; diff --git a/src/tools/clippy/tests/ui/explicit_iter_loop.rs b/src/tools/clippy/tests/ui/explicit_iter_loop.rs index 2ee6825d445..c2bf45ab2e9 100644 --- a/src/tools/clippy/tests/ui/explicit_iter_loop.rs +++ b/src/tools/clippy/tests/ui/explicit_iter_loop.rs @@ -5,7 +5,8 @@ clippy::needless_borrow, clippy::deref_addrof, clippy::unnecessary_mut_passed, - dead_code + 
dead_code, + non_local_definitions, )] use core::slice; diff --git a/src/tools/clippy/tests/ui/explicit_iter_loop.stderr b/src/tools/clippy/tests/ui/explicit_iter_loop.stderr index 725d9b63cf8..007606b52c2 100644 --- a/src/tools/clippy/tests/ui/explicit_iter_loop.stderr +++ b/src/tools/clippy/tests/ui/explicit_iter_loop.stderr @@ -1,5 +1,5 @@ error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:17:14 + --> $DIR/explicit_iter_loop.rs:18:14 | LL | for _ in vec.iter() {} | ^^^^^^^^^^ help: to write this more concisely, try: `&vec` @@ -11,103 +11,103 @@ LL | #![deny(clippy::explicit_iter_loop)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:18:14 + --> $DIR/explicit_iter_loop.rs:19:14 | LL | for _ in vec.iter_mut() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&mut vec` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:21:14 + --> $DIR/explicit_iter_loop.rs:22:14 | LL | for _ in rvec.iter() {} | ^^^^^^^^^^^ help: to write this more concisely, try: `rvec` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:30:14 + --> $DIR/explicit_iter_loop.rs:31:14 | LL | for _ in [1, 2, 3].iter() {} | ^^^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[1, 2, 3]` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:34:14 + --> $DIR/explicit_iter_loop.rs:35:14 | LL | for _ in [0; 32].iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[0; 32]` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:35:14 + --> $DIR/explicit_iter_loop.rs:36:14 | LL | for _ in [0; 33].iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[0; 33]` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:38:14 + --> $DIR/explicit_iter_loop.rs:39:14 | LL | for _ in ll.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&ll` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:40:14 + --> $DIR/explicit_iter_loop.rs:41:14 | LL | for _ in rll.iter() {} | ^^^^^^^^^^ help: to write this more concisely, try: `rll` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:43:14 + --> $DIR/explicit_iter_loop.rs:44:14 | LL | for _ in vd.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&vd` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:45:14 + --> $DIR/explicit_iter_loop.rs:46:14 | LL | for _ in rvd.iter() {} | ^^^^^^^^^^ help: to write this more concisely, try: `rvd` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:48:14 + --> $DIR/explicit_iter_loop.rs:49:14 | LL | for _ in bh.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&bh` error: it is more 
concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:51:14 + --> $DIR/explicit_iter_loop.rs:52:14 | LL | for _ in hm.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&hm` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:54:14 + --> $DIR/explicit_iter_loop.rs:55:14 | LL | for _ in bt.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&bt` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:57:14 + --> $DIR/explicit_iter_loop.rs:58:14 | LL | for _ in hs.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&hs` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:60:14 + --> $DIR/explicit_iter_loop.rs:61:14 | LL | for _ in bs.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&bs` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:149:14 + --> $DIR/explicit_iter_loop.rs:150:14 | LL | for _ in x.iter() {} | ^^^^^^^^ help: to write this more concisely, try: `&x` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:150:14 + --> $DIR/explicit_iter_loop.rs:151:14 | LL | for _ in x.iter_mut() {} | ^^^^^^^^^^^^ help: to write this more concisely, try: `&mut x` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:153:14 + --> $DIR/explicit_iter_loop.rs:154:14 | LL | for _ in r.iter() {} | ^^^^^^^^ help: to write this more concisely, try: `r` diff --git a/src/tools/clippy/tests/ui/from_over_into.fixed b/src/tools/clippy/tests/ui/from_over_into.fixed index 4a68505ee0b..a33c1ea5738 100644 --- a/src/tools/clippy/tests/ui/from_over_into.fixed +++ b/src/tools/clippy/tests/ui/from_over_into.fixed @@ -1,5 +1,6 @@ #![feature(type_alias_impl_trait)] #![warn(clippy::from_over_into)] +#![allow(non_local_definitions)] #![allow(unused)] // this should throw an error diff --git a/src/tools/clippy/tests/ui/from_over_into.rs b/src/tools/clippy/tests/ui/from_over_into.rs index bf3ed0c2b64..6cd811ae401 100644 --- a/src/tools/clippy/tests/ui/from_over_into.rs +++ b/src/tools/clippy/tests/ui/from_over_into.rs @@ -1,5 +1,6 @@ #![feature(type_alias_impl_trait)] #![warn(clippy::from_over_into)] +#![allow(non_local_definitions)] #![allow(unused)] // this should throw an error diff --git a/src/tools/clippy/tests/ui/from_over_into.stderr b/src/tools/clippy/tests/ui/from_over_into.stderr index f1370ed844f..15b4e02a264 100644 --- a/src/tools/clippy/tests/ui/from_over_into.stderr +++ b/src/tools/clippy/tests/ui/from_over_into.stderr @@ -1,5 +1,5 @@ error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:8:1 + --> $DIR/from_over_into.rs:9:1 | LL | impl Into for String { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -14,7 +14,7 @@ LL ~ StringWrapper(val) | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:16:1 + --> $DIR/from_over_into.rs:17:1 | LL | impl Into for String { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -27,7 
+27,7 @@ LL ~ SelfType(String::new()) | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:31:1 + --> $DIR/from_over_into.rs:32:1 | LL | impl Into for X { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -42,7 +42,7 @@ LL ~ let _: X = val; | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:43:1 + --> $DIR/from_over_into.rs:44:1 | LL | impl core::convert::Into for crate::ExplicitPaths { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -60,7 +60,7 @@ LL ~ val.0 | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:63:1 + --> $DIR/from_over_into.rs:64:1 | LL | impl Into for PathInExpansion { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -74,7 +74,7 @@ LL ~ fn from(val: PathInExpansion) -> Self { | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:85:5 + --> $DIR/from_over_into.rs:86:5 | LL | impl Into> for Vec { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -87,7 +87,7 @@ LL ~ FromOverInto(val) | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:95:5 + --> $DIR/from_over_into.rs:96:5 | LL | impl Into<()> for Hello { | ^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.fixed b/src/tools/clippy/tests/ui/manual_str_repeat.fixed index 888a466278c..5f2f1bd9916 100644 --- a/src/tools/clippy/tests/ui/manual_str_repeat.fixed +++ b/src/tools/clippy/tests/ui/manual_str_repeat.fixed @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::manual_str_repeat)] use std::borrow::Cow; diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.rs b/src/tools/clippy/tests/ui/manual_str_repeat.rs index a366351ffa4..3e3c7f4db4a 100644 --- a/src/tools/clippy/tests/ui/manual_str_repeat.rs +++ b/src/tools/clippy/tests/ui/manual_str_repeat.rs @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::manual_str_repeat)] use std::borrow::Cow; diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.stderr b/src/tools/clippy/tests/ui/manual_str_repeat.stderr index 9a13aa97227..6eb6f2b85a8 100644 --- a/src/tools/clippy/tests/ui/manual_str_repeat.stderr +++ b/src/tools/clippy/tests/ui/manual_str_repeat.stderr @@ -1,5 +1,5 @@ error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:7:21 + --> $DIR/manual_str_repeat.rs:8:21 | LL | let _: String = std::iter::repeat("test").take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)` @@ -8,55 +8,55 @@ LL | let _: String = std::iter::repeat("test").take(10).collect(); = help: to override `-D warnings` add `#[allow(clippy::manual_str_repeat)]` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:8:21 + --> $DIR/manual_str_repeat.rs:9:21 | LL | let _: String = std::iter::repeat('x').take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"x".repeat(10)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:9:21 + --> $DIR/manual_str_repeat.rs:10:21 | LL | let _: String = std::iter::repeat('\'').take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"'".repeat(10)` error: manual 
implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:10:21 + --> $DIR/manual_str_repeat.rs:11:21 | LL | let _: String = std::iter::repeat('"').take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"\"".repeat(10)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:14:13 + --> $DIR/manual_str_repeat.rs:15:13 | LL | let _ = repeat(x).take(count + 2).collect::(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count + 2)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:23:21 + --> $DIR/manual_str_repeat.rs:24:21 | LL | let _: String = repeat(*x).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(*x).repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:32:21 + --> $DIR/manual_str_repeat.rs:33:21 | LL | let _: String = repeat(x).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:44:21 + --> $DIR/manual_str_repeat.rs:45:21 | LL | let _: String = repeat(Cow::Borrowed("test")).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Cow::Borrowed("test").repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:47:21 + --> $DIR/manual_str_repeat.rs:48:21 | LL | let _: String = repeat(x).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:62:21 + --> $DIR/manual_str_repeat.rs:63:21 | LL | let _: String = std::iter::repeat("test").take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)` diff --git a/src/tools/clippy/tests/ui/needless_borrow.fixed b/src/tools/clippy/tests/ui/needless_borrow.fixed index 23e8bf8a468..998f5430fdf 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.fixed +++ b/src/tools/clippy/tests/ui/needless_borrow.fixed @@ -1,6 +1,7 @@ #![feature(lint_reasons)] #![allow( unused, + non_local_definitions, clippy::uninlined_format_args, clippy::unnecessary_mut_passed, clippy::unnecessary_to_owned, diff --git a/src/tools/clippy/tests/ui/needless_borrow.rs b/src/tools/clippy/tests/ui/needless_borrow.rs index 27771a8f15b..acb2c74d849 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.rs +++ b/src/tools/clippy/tests/ui/needless_borrow.rs @@ -1,6 +1,7 @@ #![feature(lint_reasons)] #![allow( unused, + non_local_definitions, clippy::uninlined_format_args, clippy::unnecessary_mut_passed, clippy::unnecessary_to_owned, diff --git a/src/tools/clippy/tests/ui/needless_borrow.stderr b/src/tools/clippy/tests/ui/needless_borrow.stderr index a21ed8382c1..9034bd83a0b 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.stderr +++ b/src/tools/clippy/tests/ui/needless_borrow.stderr @@ -1,5 +1,5 @@ error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:15:15 + --> $DIR/needless_borrow.rs:16:15 | LL | let _ = x(&&a); // warn | ^^^ help: change this to: `&a` @@ -8,157 +8,157 @@ LL | let _ = x(&&a); // warn = help: to override `-D warnings` add `#[allow(clippy::needless_borrow)]` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:19:13 + --> 
$DIR/needless_borrow.rs:20:13 | LL | mut_ref(&mut &mut b); // warn | ^^^^^^^^^^^ help: change this to: `&mut b` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:31:13 + --> $DIR/needless_borrow.rs:32:13 | LL | &&a | ^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:33:15 + --> $DIR/needless_borrow.rs:34:15 | LL | 46 => &&a, | ^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:39:27 + --> $DIR/needless_borrow.rs:40:27 | LL | break &ref_a; | ^^^^^^ help: change this to: `ref_a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:46:15 + --> $DIR/needless_borrow.rs:47:15 | LL | let _ = x(&&&a); | ^^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:47:15 + --> $DIR/needless_borrow.rs:48:15 | LL | let _ = x(&mut &&a); | ^^^^^^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:48:15 + --> $DIR/needless_borrow.rs:49:15 | LL | let _ = x(&&&mut b); | ^^^^^^^^ help: change this to: `&mut b` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:49:15 + --> $DIR/needless_borrow.rs:50:15 | LL | let _ = x(&&ref_a); | ^^^^^^^ help: change this to: `ref_a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:52:11 + --> $DIR/needless_borrow.rs:53:11 | LL | x(&b); | ^^ help: change this to: `b` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:59:13 + --> $DIR/needless_borrow.rs:60:13 | LL | mut_ref(&mut x); | ^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:60:13 + --> $DIR/needless_borrow.rs:61:13 | LL | mut_ref(&mut &mut x); | ^^^^^^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:61:23 + --> $DIR/needless_borrow.rs:62:23 | LL | let y: &mut i32 = &mut x; | ^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:62:23 + --> $DIR/needless_borrow.rs:63:23 | LL | let y: &mut i32 = &mut &mut x; | ^^^^^^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:71:14 + --> $DIR/needless_borrow.rs:72:14 | LL | 0 => &mut x, | ^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:77:14 + --> $DIR/needless_borrow.rs:78:14 | LL | 0 => &mut x, | ^^^^^^ help: change this to: `x` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:89:13 + --> $DIR/needless_borrow.rs:90:13 | LL | let _ = (&x).0; | ^^^^ help: change this to: `x` error: this expression borrows a value the compiler would automatically borrow - 
--> $DIR/needless_borrow.rs:91:22 + --> $DIR/needless_borrow.rs:92:22 | LL | let _ = unsafe { (&*x).0 }; | ^^^^^ help: change this to: `(*x)` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:101:5 + --> $DIR/needless_borrow.rs:102:5 | LL | (&&()).foo(); | ^^^^^^ help: change this to: `(&())` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:110:5 + --> $DIR/needless_borrow.rs:111:5 | LL | (&&5).foo(); | ^^^^^ help: change this to: `(&5)` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:136:23 + --> $DIR/needless_borrow.rs:137:23 | LL | let x: (&str,) = (&"",); | ^^^ help: change this to: `""` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:178:13 + --> $DIR/needless_borrow.rs:179:13 | LL | (&self.f)() | ^^^^^^^^^ help: change this to: `(self.f)` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:187:13 + --> $DIR/needless_borrow.rs:188:13 | LL | (&mut self.f)() | ^^^^^^^^^^^^^ help: change this to: `(self.f)` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:224:22 + --> $DIR/needless_borrow.rs:225:22 | LL | let _ = &mut (&mut { x.u }).x; | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:231:22 + --> $DIR/needless_borrow.rs:232:22 | LL | let _ = &mut (&mut { x.u }).x; | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:235:22 + --> $DIR/needless_borrow.rs:236:22 | LL | let _ = &mut (&mut x.u).x; | ^^^^^^^^^^ help: change this to: `x.u` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:236:22 + --> $DIR/needless_borrow.rs:237:22 | LL | let _ = &mut (&mut { x.u }).x; | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }` From 1b733558bf7ac425a2b56726e7229cc298982536 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sat, 27 Jan 2024 17:37:53 +0100 Subject: [PATCH 18/92] Allow newly added non_local_definitions in std --- library/core/src/convert/mod.rs | 1 + library/core/src/hash/mod.rs | 1 + library/core/tests/iter/adapters/step_by.rs | 1 + library/core/tests/result.rs | 2 ++ 4 files changed, 5 insertions(+) diff --git a/library/core/src/convert/mod.rs b/library/core/src/convert/mod.rs index 45f6e375e89..85740dce866 100644 --- a/library/core/src/convert/mod.rs +++ b/library/core/src/convert/mod.rs @@ -396,6 +396,7 @@ pub trait AsMut { /// For example, take this code: /// /// ``` +/// # #![cfg_attr(not(bootstrap), allow(non_local_definitions))] /// struct Wrapper(Vec); /// impl From> for Vec { /// fn from(w: Wrapper) -> Vec { diff --git a/library/core/src/hash/mod.rs b/library/core/src/hash/mod.rs index 153971a59c5..bfdd28a7399 100644 --- a/library/core/src/hash/mod.rs +++ b/library/core/src/hash/mod.rs @@ -454,6 +454,7 @@ pub trait Hasher { /// ``` /// #![feature(hasher_prefixfree_extras)] /// # // Stubs to make the `impl` below pass the compiler + /// # #![cfg_attr(not(bootstrap), allow(non_local_definitions))] /// # struct MyCollection(Option); /// # impl MyCollection { /// # fn len(&self) -> usize { todo!() } diff --git 
a/library/core/tests/iter/adapters/step_by.rs b/library/core/tests/iter/adapters/step_by.rs index b4d61d28cb2..29adf0b42fa 100644 --- a/library/core/tests/iter/adapters/step_by.rs +++ b/library/core/tests/iter/adapters/step_by.rs @@ -49,6 +49,7 @@ fn test_iterator_step_by_nth() { } #[test] +#[cfg_attr(not(bootstrap), allow(non_local_definitions))] fn test_iterator_step_by_nth_overflow() { #[cfg(target_pointer_width = "16")] type Bigger = u32; diff --git a/library/core/tests/result.rs b/library/core/tests/result.rs index 6c008ab2cb1..d02dc45da34 100644 --- a/library/core/tests/result.rs +++ b/library/core/tests/result.rs @@ -195,6 +195,7 @@ pub fn test_unwrap_or_default() { } #[test] +#[cfg_attr(not(bootstrap), allow(non_local_definitions))] pub fn test_into_ok() { fn infallible_op() -> Result { Ok(666) @@ -217,6 +218,7 @@ pub fn test_into_ok() { } #[test] +#[cfg_attr(not(bootstrap), allow(non_local_definitions))] pub fn test_into_err() { fn until_error_op() -> Result { Err(666) From 7b6057dea42c0aaec0735b155a2bc1aeaa0c0a93 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sat, 27 Jan 2024 17:39:16 +0100 Subject: [PATCH 19/92] Allow newly added non_local_definitions in rustfmt --- src/tools/rustfmt/src/source_file.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tools/rustfmt/src/source_file.rs b/src/tools/rustfmt/src/source_file.rs index 958f9b0154f..512a8593c27 100644 --- a/src/tools/rustfmt/src/source_file.rs +++ b/src/tools/rustfmt/src/source_file.rs @@ -66,6 +66,7 @@ where } } + #[cfg_attr(not(bootstrap), allow(non_local_definitions))] impl From<&FileName> for rustc_span::FileName { fn from(filename: &FileName) -> rustc_span::FileName { match filename { From 6320ad0b07be860232d2b1cf9e3bdf870ca28894 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sat, 27 Jan 2024 18:26:37 +0100 Subject: [PATCH 20/92] Fix non_local_definitions lint in rustdoc --- src/librustdoc/doctest.rs | 7 +++++++ src/librustdoc/html/markdown/tests.rs | 5 ----- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index f9d4d1af114..828c72b2840 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -1208,6 +1208,13 @@ impl Tester for Collector { } } +#[cfg(test)] // used in tests +impl Tester for Vec { + fn add_test(&mut self, _test: String, _config: LangString, line: usize) { + self.push(line); + } +} + struct HirCollector<'a, 'hir, 'tcx> { sess: &'a Session, collector: &'a mut Collector, diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs index 4dd176b3a69..1de97e49b83 100644 --- a/src/librustdoc/html/markdown/tests.rs +++ b/src/librustdoc/html/markdown/tests.rs @@ -480,11 +480,6 @@ fn test_markdown_html_escape() { #[test] fn test_find_testable_code_line() { fn t(input: &str, expect: &[usize]) { - impl crate::doctest::Tester for Vec { - fn add_test(&mut self, _test: String, _config: LangString, line: usize) { - self.push(line); - } - } let mut lines = Vec::::new(); find_testable_code(input, &mut lines, ErrorCodes::No, false, None, true); assert_eq!(lines, expect); From a8ae1175c7e8f96996b69d75ca6a18e58efb7814 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sun, 28 Jan 2024 13:49:45 +0100 Subject: [PATCH 21/92] Fix non_local_definitions lint in rustc_hir_analysis --- .../src/coherence/inherent_impls_overlap.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs 
b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs index 9e1e884d976..3aef29f4ae4 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs @@ -24,6 +24,11 @@ struct InherentOverlapChecker<'tcx> { tcx: TyCtxt<'tcx>, } +rustc_index::newtype_index! { + #[orderable] + pub struct RegionId {} +} + impl<'tcx> InherentOverlapChecker<'tcx> { /// Checks whether any associated items in impls 1 and 2 share the same identifier and /// namespace. @@ -205,11 +210,6 @@ impl<'tcx> InherentOverlapChecker<'tcx> { // This is advantageous to running the algorithm over the // entire graph when there are many connected regions. - rustc_index::newtype_index! { - #[orderable] - pub struct RegionId {} - } - struct ConnectedRegion { idents: SmallVec<[Symbol; 8]>, impl_blocks: FxHashSet, From 85e3a2ee043f6404561a9fbed799c07d83f305ce Mon Sep 17 00:00:00 2001 From: Urgau Date: Thu, 15 Feb 2024 20:30:16 +0100 Subject: [PATCH 22/92] Add const-anon suggestion for non local impl --- compiler/rustc_lint/messages.ftl | 1 + compiler/rustc_lint/src/lints.rs | 8 +++++++- compiler/rustc_lint/src/non_local_def.rs | 16 +++++++++++++++- tests/ui/lint/non_local_definitions.stderr | 3 +++ 4 files changed, 26 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 4e0ba376b7d..ca3941c06f4 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -421,6 +421,7 @@ lint_non_local_definitions_impl = non-local `impl` definition, they should be av } .non_local = an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... 
{"}"}`) at top-level module and anon-const at the same nesting as the trait or type + .const_anon = use a const-anon item to suppress this lint lint_non_local_definitions_macro_rules = non-local `macro_rules!` definition, they should be avoided as they go against expectation .help = diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index 15f158961d0..1a56fa751c2 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1301,7 +1301,13 @@ pub enum NonLocalDefinitionsDiag { #[note(lint_non_local)] #[note(lint_exception)] #[note(lint_non_local_definitions_deprecation)] - Impl { depth: u32, body_kind_descr: &'static str, body_name: String }, + Impl { + depth: u32, + body_kind_descr: &'static str, + body_name: String, + #[suggestion(lint_const_anon, code = "_", applicability = "machine-applicable")] + const_anon: Option, + }, #[diag(lint_non_local_definitions_macro_rules)] #[help] #[note(lint_non_local)] diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs index 28bd49f36a0..bfd7b4a72f9 100644 --- a/compiler/rustc_lint/src/non_local_def.rs +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -1,4 +1,4 @@ -use rustc_hir::{def::DefKind, Body, Item, ItemKind, Path, QPath, TyKind}; +use rustc_hir::{def::DefKind, Body, Item, ItemKind, Node, Path, QPath, TyKind}; use rustc_span::{def_id::DefId, sym, symbol::kw, MacroKind}; use smallvec::{smallvec, SmallVec}; @@ -140,6 +140,19 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { // If none of them have a local parent (LOGICAL NOR) this means that // this impl definition is a non-local definition and so we lint on it. if !(self_ty_has_local_parent || of_trait_has_local_parent) { + let const_anon = if self.body_depth == 1 + && parent_def_kind == DefKind::Const + && parent_opt_item_name != Some(kw::Underscore) + && let Some(parent) = parent.as_local() + && let Node::Item(item) = cx.tcx.hir_node_by_def_id(parent) + && let ItemKind::Const(ty, _, _) = item.kind + && let TyKind::Tup(&[]) = ty.kind + { + Some(item.ident.span) + } else { + None + }; + cx.emit_span_lint( NON_LOCAL_DEFINITIONS, item.span, @@ -149,6 +162,7 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { body_name: parent_opt_item_name .map(|s| s.to_ident_string()) .unwrap_or_else(|| "".to_string()), + const_anon, }, ) } diff --git a/tests/ui/lint/non_local_definitions.stderr b/tests/ui/lint/non_local_definitions.stderr index f15457734bc..8403357b115 100644 --- a/tests/ui/lint/non_local_definitions.stderr +++ b/tests/ui/lint/non_local_definitions.stderr @@ -1,6 +1,9 @@ warning: non-local `impl` definition, they should be avoided as they go against expectation --> $DIR/non_local_definitions.rs:32:5 | +LL | const Z: () = { + | - help: use a const-anon item to suppress this lint: `_` +... 
LL | impl Uto for &Test {} | ^^^^^^^^^^^^^^^^^^^^^ | From 63469ab762b5710b07418e61ff758bd48d0f8b3e Mon Sep 17 00:00:00 2001 From: Urgau Date: Thu, 15 Feb 2024 20:33:30 +0100 Subject: [PATCH 23/92] Add cargo update suggestion for non local defs --- compiler/rustc_lint/messages.ftl | 2 + compiler/rustc_lint/src/lints.rs | 18 ++- compiler/rustc_lint/src/non_local_def.rs | 25 ++++- tests/ui/lint/auxiliary/non_local_macro.rs | 26 +++++ tests/ui/lint/non_local_definitions.rs | 12 ++ tests/ui/lint/non_local_definitions.stderr | 124 +++++++++++++-------- 6 files changed, 155 insertions(+), 52 deletions(-) create mode 100644 tests/ui/lint/auxiliary/non_local_macro.rs diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index ca3941c06f4..3f7abebf7b9 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -411,6 +411,8 @@ lint_non_fmt_panic_unused = } .add_fmt_suggestion = or add a "{"{"}{"}"}" format string to use the message literally +lint_non_local_definitions_cargo_update = the {$macro_kind} `{$macro_name}` may come from an old version of the `{$crate_name}` crate, try updating your dependency with `cargo update -p {$crate_name}` + lint_non_local_definitions_deprecation = this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue lint_non_local_definitions_impl = non-local `impl` definition, they should be avoided as they go against expectation diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index 1a56fa751c2..3a795205059 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1305,6 +1305,8 @@ pub enum NonLocalDefinitionsDiag { depth: u32, body_kind_descr: &'static str, body_name: String, + #[subdiagnostic] + cargo_update: Option, #[suggestion(lint_const_anon, code = "_", applicability = "machine-applicable")] const_anon: Option, }, @@ -1313,7 +1315,21 @@ pub enum NonLocalDefinitionsDiag { #[note(lint_non_local)] #[note(lint_exception)] #[note(lint_non_local_definitions_deprecation)] - MacroRules { depth: u32, body_kind_descr: &'static str, body_name: String }, + MacroRules { + depth: u32, + body_kind_descr: &'static str, + body_name: String, + #[subdiagnostic] + cargo_update: Option, + }, +} + +#[derive(Subdiagnostic)] +#[note(lint_non_local_definitions_cargo_update)] +pub struct NonLocalDefinitionsCargoUpdateNote { + pub macro_kind: &'static str, + pub macro_name: Symbol, + pub crate_name: Symbol, } // pass_by_value.rs diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs index bfd7b4a72f9..6cb6fd1cbd5 100644 --- a/compiler/rustc_lint/src/non_local_def.rs +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -1,9 +1,11 @@ use rustc_hir::{def::DefKind, Body, Item, ItemKind, Node, Path, QPath, TyKind}; -use rustc_span::{def_id::DefId, sym, symbol::kw, MacroKind}; +use rustc_span::def_id::{DefId, LOCAL_CRATE}; +use rustc_span::{sym, symbol::kw, ExpnKind, MacroKind}; use smallvec::{smallvec, SmallVec}; -use crate::{lints::NonLocalDefinitionsDiag, LateContext, LateLintPass, LintContext}; +use crate::lints::{NonLocalDefinitionsCargoUpdateNote, NonLocalDefinitionsDiag}; +use crate::{LateContext, LateLintPass, LintContext}; declare_lint! 
{ /// The `non_local_definitions` lint checks for `impl` blocks and `#[macro_export]` @@ -77,6 +79,23 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { return; } + let cargo_update = || { + let oexpn = item.span.ctxt().outer_expn_data(); + if let Some(def_id) = oexpn.macro_def_id + && let ExpnKind::Macro(macro_kind, macro_name) = oexpn.kind + && def_id.krate != LOCAL_CRATE + && std::env::var_os("CARGO").is_some() + { + Some(NonLocalDefinitionsCargoUpdateNote { + macro_kind: macro_kind.descr(), + macro_name, + crate_name: cx.tcx.crate_name(def_id.krate), + }) + } else { + None + } + }; + match item.kind { ItemKind::Impl(impl_) => { // The RFC states: @@ -162,6 +181,7 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { body_name: parent_opt_item_name .map(|s| s.to_ident_string()) .unwrap_or_else(|| "".to_string()), + cargo_update: cargo_update(), const_anon, }, ) @@ -179,6 +199,7 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { body_name: parent_opt_item_name .map(|s| s.to_ident_string()) .unwrap_or_else(|| "".to_string()), + cargo_update: cargo_update(), }, ) } diff --git a/tests/ui/lint/auxiliary/non_local_macro.rs b/tests/ui/lint/auxiliary/non_local_macro.rs new file mode 100644 index 00000000000..8c0ff8adda1 --- /dev/null +++ b/tests/ui/lint/auxiliary/non_local_macro.rs @@ -0,0 +1,26 @@ +#[macro_export] +macro_rules! non_local_impl { + ($a:ident) => { + const _IMPL_DEBUG: () = { + impl ::std::fmt::Debug for $a { + fn fmt(&self, _: &mut ::std::fmt::Formatter<'_>) + -> ::std::result::Result<(), ::std::fmt::Error> + { + todo!() + } + } + }; + } +} + +#[macro_export] +macro_rules! non_local_macro_rules { + ($a:ident) => { + const _MACRO_EXPORT: () = { + #[macro_export] + macro_rules! $a { + () => {} + } + }; + } +} diff --git a/tests/ui/lint/non_local_definitions.rs b/tests/ui/lint/non_local_definitions.rs index 986efbfcf0f..c9aaa049346 100644 --- a/tests/ui/lint/non_local_definitions.rs +++ b/tests/ui/lint/non_local_definitions.rs @@ -1,8 +1,12 @@ //@ check-pass //@ edition:2021 +//@ aux-build:non_local_macro.rs +//@ rustc-env:CARGO=/usr/bin/cargo #![feature(inline_const)] +extern crate non_local_macro; + use std::fmt::{Debug, Display}; struct Test; @@ -364,6 +368,14 @@ macro_rules! 
m { m!(); +struct CargoUpdate; + +non_local_macro::non_local_impl!(CargoUpdate); +//~^ WARN non-local `impl` definition + +non_local_macro::non_local_macro_rules!(my_macro); +//~^ WARN non-local `macro_rules!` definition + fn bitflags() { struct Flags; diff --git a/tests/ui/lint/non_local_definitions.stderr b/tests/ui/lint/non_local_definitions.stderr index 8403357b115..f9f29ec63a8 100644 --- a/tests/ui/lint/non_local_definitions.stderr +++ b/tests/ui/lint/non_local_definitions.stderr @@ -1,5 +1,5 @@ warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:32:5 + --> $DIR/non_local_definitions.rs:36:5 | LL | const Z: () = { | - help: use a const-anon item to suppress this lint: `_` @@ -14,7 +14,7 @@ LL | impl Uto for &Test {} = note: `#[warn(non_local_definitions)]` on by default warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:42:5 + --> $DIR/non_local_definitions.rs:46:5 | LL | impl Uto for *mut Test {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL | impl Uto for *mut Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:50:9 + --> $DIR/non_local_definitions.rs:54:9 | LL | impl Uto for Test {} | ^^^^^^^^^^^^^^^^^^^^ @@ -36,7 +36,7 @@ LL | impl Uto for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:59:5 + --> $DIR/non_local_definitions.rs:63:5 | LL | impl Uto2 for Test {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -47,7 +47,7 @@ LL | impl Uto2 for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:67:5 + --> $DIR/non_local_definitions.rs:71:5 | LL | impl Uto3 for Test {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -58,7 +58,7 @@ LL | impl Uto3 for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:71:5 + --> $DIR/non_local_definitions.rs:75:5 | LL | macro_rules! m0 { () => { } }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,7 +69,7 @@ LL | macro_rules! m0 { () => { } }; = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:83:5 + --> $DIR/non_local_definitions.rs:87:5 | LL | macro_rules! m { () => { } }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -80,7 +80,7 @@ LL | macro_rules! 
m { () => { } }; = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:86:5 + --> $DIR/non_local_definitions.rs:90:5 | LL | / impl Test { LL | | @@ -94,7 +94,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:92:9 + --> $DIR/non_local_definitions.rs:96:9 | LL | / impl Test { LL | | @@ -108,7 +108,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:101:9 + --> $DIR/non_local_definitions.rs:105:9 | LL | / impl Test { LL | | @@ -122,7 +122,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:110:9 + --> $DIR/non_local_definitions.rs:114:9 | LL | / impl Test { LL | | @@ -136,7 +136,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:118:5 + --> $DIR/non_local_definitions.rs:122:5 | LL | / impl Display for Test { LL | | @@ -152,7 +152,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:125:5 + --> $DIR/non_local_definitions.rs:129:5 | LL | impl dyn Uto5 {} | ^^^^^^^^^^^^^^^^ @@ -163,7 +163,7 @@ LL | impl dyn Uto5 {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:128:5 + --> $DIR/non_local_definitions.rs:132:5 | LL | impl Uto5 for Vec { } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -174,7 +174,7 @@ LL | impl Uto5 for Vec { } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:131:5 + --> $DIR/non_local_definitions.rs:135:5 | LL | impl Uto5 for &dyn Uto5 {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -185,7 +185,7 @@ LL | impl Uto5 for &dyn Uto5 {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:134:5 + --> $DIR/non_local_definitions.rs:138:5 | LL | impl Uto5 for *mut Test {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -196,7 +196,7 @@ LL | impl Uto5 for *mut Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:137:5 + --> $DIR/non_local_definitions.rs:141:5 | LL | impl Uto5 for *mut [Test] {} | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -207,7 +207,7 @@ LL | impl Uto5 for *mut [Test] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:140:5 + --> $DIR/non_local_definitions.rs:144:5 | LL | impl Uto5 for [Test; 8] {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -218,7 +218,7 @@ LL | impl Uto5 for [Test; 8] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:143:5 + --> $DIR/non_local_definitions.rs:147:5 | LL | impl Uto5 for (Test,) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -229,7 +229,7 @@ LL | impl Uto5 for (Test,) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:146:5 + --> $DIR/non_local_definitions.rs:150:5 | LL | impl Uto5 for fn(Test) -> () {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -240,7 +240,7 @@ LL | impl Uto5 for fn(Test) -> () {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:149:5 + --> $DIR/non_local_definitions.rs:153:5 | LL | impl Uto5 for fn() -> Test {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -251,7 +251,7 @@ LL | impl Uto5 for fn() -> Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:153:9 + --> $DIR/non_local_definitions.rs:157:9 | LL | impl Uto5 for Test {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -262,7 +262,7 @@ LL | impl Uto5 for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:160:9 + --> $DIR/non_local_definitions.rs:164:9 | LL | impl Uto5 for &Test {} | ^^^^^^^^^^^^^^^^^^^^^^ @@ -273,7 +273,7 @@ LL | impl Uto5 for &Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:167:9 + --> $DIR/non_local_definitions.rs:171:9 | LL | impl Uto5 for &(Test,) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -284,7 +284,7 @@ LL | impl Uto5 for &(Test,) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:174:9 + --> $DIR/non_local_definitions.rs:178:9 | LL | impl Uto5 for &(Test,Test) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -295,7 +295,7 @@ LL | impl Uto5 for &(Test,Test) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:182:5 + --> $DIR/non_local_definitions.rs:186:5 | LL | impl Uto5 for *mut InsideMain {} | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -306,7 +306,7 @@ LL | impl Uto5 for *mut InsideMain {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:184:5 + --> $DIR/non_local_definitions.rs:188:5 | LL | impl Uto5 for *mut [InsideMain] {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -317,7 +317,7 @@ LL | impl Uto5 for *mut [InsideMain] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:186:5 + --> $DIR/non_local_definitions.rs:190:5 | LL | impl Uto5 for [InsideMain; 8] {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -328,7 +328,7 @@ LL | impl Uto5 for [InsideMain; 8] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:188:5 + --> $DIR/non_local_definitions.rs:192:5 | LL | impl Uto5 for (InsideMain,) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -339,7 +339,7 @@ LL | impl Uto5 for (InsideMain,) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:190:5 + --> $DIR/non_local_definitions.rs:194:5 | LL | impl Uto5 for fn(InsideMain) -> () {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -350,7 +350,7 @@ LL | impl Uto5 for fn(InsideMain) -> () {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:192:5 + --> $DIR/non_local_definitions.rs:196:5 | LL | impl Uto5 for fn() -> InsideMain {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -361,7 +361,7 @@ LL | impl Uto5 for fn() -> InsideMain {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:206:9 + --> $DIR/non_local_definitions.rs:210:9 | LL | / impl Display for InsideMain { LL | | @@ -377,7 +377,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:213:9 + --> $DIR/non_local_definitions.rs:217:9 | LL | / impl InsideMain { LL | | @@ -394,7 +394,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:217:17 + --> $DIR/non_local_definitions.rs:221:17 | LL | macro_rules! m2 { () => { } }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -405,7 +405,7 @@ LL | macro_rules! 
m2 { () => { } }; = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:227:5 + --> $DIR/non_local_definitions.rs:231:5 | LL | impl Uto3 for Vec { } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -416,7 +416,7 @@ LL | impl Uto3 for Vec { } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:236:5 + --> $DIR/non_local_definitions.rs:240:5 | LL | impl Uto7 for Test where Local: std::any::Any {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -427,7 +427,7 @@ LL | impl Uto7 for Test where Local: std::any::Any {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:239:5 + --> $DIR/non_local_definitions.rs:243:5 | LL | impl Uto8 for T {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -438,7 +438,7 @@ LL | impl Uto8 for T {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:248:5 + --> $DIR/non_local_definitions.rs:252:5 | LL | / impl Default for UwU { LL | | @@ -454,7 +454,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:259:5 + --> $DIR/non_local_definitions.rs:263:5 | LL | / impl From for () { LL | | @@ -470,7 +470,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:268:5 + --> $DIR/non_local_definitions.rs:272:5 | LL | / impl AsRef for () { LL | | @@ -484,7 +484,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:279:5 + --> $DIR/non_local_definitions.rs:283:5 | LL | / impl PartialEq for G { LL | | @@ -500,7 +500,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:296:5 + --> $DIR/non_local_definitions.rs:300:5 | LL | / impl PartialEq for &Dog { LL | | @@ -516,7 +516,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:303:5 + --> $DIR/non_local_definitions.rs:307:5 | LL | / impl PartialEq<()> for Dog { LL | | @@ -532,7 +532,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:310:5 + --> 
$DIR/non_local_definitions.rs:314:5 | LL | / impl PartialEq<()> for &Dog { LL | | @@ -548,7 +548,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:317:5 + --> $DIR/non_local_definitions.rs:321:5 | LL | / impl PartialEq for () { LL | | @@ -564,7 +564,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:339:5 + --> $DIR/non_local_definitions.rs:343:5 | LL | / impl From>> for () { LL | | @@ -580,7 +580,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:346:5 + --> $DIR/non_local_definitions.rs:350:5 | LL | / impl From<()> for Wrap { LL | | @@ -596,7 +596,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:359:13 + --> $DIR/non_local_definitions.rs:363:13 | LL | impl MacroTrait for OutsideStruct {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -610,5 +610,31 @@ LL | m!(); = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue = note: this warning originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) -warning: 48 warnings emitted +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:373:1 + | +LL | non_local_macro::non_local_impl!(CargoUpdate); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant `_IMPL_DEBUG` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: the macro `non_local_macro::non_local_impl` may come from an old version of the `non_local_macro` crate, try updating your dependency with `cargo update -p non_local_macro` + = note: this warning originates in the macro `non_local_macro::non_local_impl` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:376:1 + | +LL | non_local_macro::non_local_macro_rules!(my_macro); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current constant `_MACRO_EXPORT` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: the macro `non_local_macro::non_local_macro_rules` may come from an old version of the `non_local_macro` crate, try updating your dependency with `cargo update -p non_local_macro` + = note: this warning originates in the macro `non_local_macro::non_local_macro_rules` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: 50 warnings emitted From 6b17dba68cff05978c10eb2600c16d4450ad77f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 18 Feb 2024 09:41:20 +0200 Subject: [PATCH 24/92] Merge commit 'ac998a74b3c8ff4b81c3eeb9a18811d4cc76226d' into sync-from-ra --- .github/rust.json | 33 + .github/workflows/autopublish.yaml | 2 +- .github/workflows/ci.yaml | 20 +- .github/workflows/fuzz.yml | 2 +- .github/workflows/metrics.yaml | 16 +- .github/workflows/publish-libs.yaml | 2 +- .github/workflows/release.yaml | 14 +- .github/workflows/rustdoc.yaml | 2 +- Cargo.lock | 17 +- Cargo.toml | 2 +- crates/base-db/src/input.rs | 228 +---- crates/base-db/src/lib.rs | 14 + crates/flycheck/src/lib.rs | 79 +- crates/hir-def/src/attr.rs | 42 +- crates/hir-def/src/body/lower.rs | 11 +- crates/hir-def/src/body/pretty.rs | 6 + crates/hir-def/src/body/scope.rs | 1 + crates/hir-def/src/data.rs | 12 +- crates/hir-def/src/expander.rs | 6 +- crates/hir-def/src/find_path.rs | 63 +- crates/hir-def/src/hir.rs | 9 + .../src/macro_expansion_tests/mbe/matching.rs | 2 +- .../macro_expansion_tests/mbe/meta_syntax.rs | 42 +- .../macro_expansion_tests/mbe/metavar_expr.rs | 6 +- .../macro_expansion_tests/mbe/regression.rs | 54 ++ .../mbe/tt_conversion.rs | 4 +- .../hir-def/src/macro_expansion_tests/mod.rs | 1 + crates/hir-def/src/nameres/collector.rs | 95 +- crates/hir-def/src/nameres/diagnostics.rs | 3 + crates/hir-expand/src/builtin_fn_macro.rs | 2 +- crates/hir-expand/src/change.rs | 34 +- crates/hir-expand/src/db.rs | 154 ++-- crates/hir-expand/src/declarative.rs | 14 +- crates/hir-expand/src/lib.rs | 26 +- crates/hir-expand/src/mod_path.rs | 15 + crates/hir-expand/src/proc_macro.rs | 46 +- crates/hir-ty/src/diagnostics/expr.rs | 13 +- .../diagnostics/match_check/pat_analysis.rs | 46 +- crates/hir-ty/src/infer.rs | 133 ++- crates/hir-ty/src/infer/closure.rs | 4 + crates/hir-ty/src/infer/expr.rs | 23 + crates/hir-ty/src/infer/mutability.rs | 4 + crates/hir-ty/src/infer/unify.rs | 83 +- crates/hir-ty/src/layout/target.rs | 8 +- crates/hir-ty/src/layout/tests.rs | 12 +- crates/hir-ty/src/lib.rs | 4 +- crates/hir-ty/src/mir/borrowck.rs | 185 +++- crates/hir-ty/src/mir/lower.rs | 4 +- crates/hir-ty/src/tests/diagnostics.rs | 42 + crates/hir-ty/src/tests/simple.rs | 3 - crates/hir/src/lib.rs | 246 ++++- crates/hir/src/term_search.rs | 298 ++++++ crates/hir/src/term_search/expr.rs | 468 ++++++++++ crates/hir/src/term_search/tactics.rs | 859 ++++++++++++++++++ .../src/handlers/fix_visibility.rs | 4 +- .../src/handlers/generate_trait_from_impl.rs | 106 +-- .../ide-assists/src/handlers/term_search.rs | 253 ++++++ crates/ide-assists/src/lib.rs | 7 +- crates/ide-assists/src/tests/generated.rs | 4 +- crates/ide-completion/src/completions.rs | 10 +- crates/ide-completion/src/completions/expr.rs | 56 ++ .../src/completions/flyimport.rs | 2 + crates/ide-completion/src/completions/type.rs | 20 + crates/ide-completion/src/config.rs | 1 + crates/ide-completion/src/context.rs | 3 +- crates/ide-completion/src/item.rs | 50 + crates/ide-completion/src/render.rs | 
443 ++++++++- crates/ide-completion/src/render/function.rs | 50 +- crates/ide-completion/src/tests.rs | 1 + crates/ide-completion/src/tests/expression.rs | 32 +- crates/ide-completion/src/tests/flyimport.rs | 19 + crates/ide-completion/src/tests/record.rs | 2 + crates/ide-completion/src/tests/special.rs | 40 +- crates/ide-completion/src/tests/type_pos.rs | 40 + crates/ide-db/src/famous_defs.rs | 8 + crates/ide-db/src/path_transform.rs | 2 +- crates/ide-db/src/rename.rs | 5 +- crates/ide-db/src/source_change.rs | 68 +- crates/ide-db/src/syntax_helpers/node_ext.rs | 1 + .../src/handlers/incorrect_case.rs | 2 +- .../src/handlers/macro_error.rs | 2 +- .../src/handlers/missing_match_arms.rs | 18 + .../src/handlers/remove_trailing_return.rs | 12 + .../src/handlers/type_mismatch.rs | 3 +- .../src/handlers/typed_hole.rs | 272 ++++-- crates/ide-diagnostics/src/tests.rs | 85 ++ crates/ide/src/doc_links.rs | 2 +- crates/ide/src/hover/tests.rs | 8 +- crates/ide/src/lib.rs | 12 +- crates/ide/src/parent_module.rs | 2 +- crates/ide/src/rename.rs | 41 +- crates/ide/src/shuffle_crate_graph.rs | 2 - crates/ide/src/static_index.rs | 23 +- crates/ide/src/status.rs | 8 - crates/load-cargo/src/lib.rs | 74 +- crates/mbe/src/expander/transcriber.rs | 18 +- crates/mbe/src/syntax_bridge.rs | 6 +- crates/parser/src/grammar/expressions.rs | 29 +- crates/parser/src/grammar/expressions/atom.rs | 14 + crates/parser/src/grammar/generic_params.rs | 10 + crates/parser/src/grammar/patterns.rs | 9 + crates/parser/src/syntax_kind/generated.rs | 6 +- ...cord_literal_before_ellipsis_recovery.rast | 36 +- ...0032_record_literal_field_eq_recovery.rast | 41 + .../0032_record_literal_field_eq_recovery.rs | 3 + .../0033_record_pat_field_eq_recovery.rast | 43 + .../err/0033_record_pat_field_eq_recovery.rs | 3 + .../parser/inline/ok/0209_become_expr.rast | 31 + .../parser/inline/ok/0209_become_expr.rs | 3 + .../inline/ok/0211_async_trait_bound.rast | 43 + .../inline/ok/0211_async_trait_bound.rs | 1 + .../inline/ok/0212_const_trait_bound.rast | 34 + .../inline/ok/0212_const_trait_bound.rs | 1 + crates/proc-macro-api/src/lib.rs | 8 +- crates/proc-macro-api/src/process.rs | 25 +- crates/proc-macro-srv/Cargo.toml | 1 + crates/proc-macro-srv/src/lib.rs | 5 + .../src/server/rust_analyzer_span.rs | 55 +- crates/proc-macro-srv/src/server/token_id.rs | 55 +- crates/proc-macro-srv/src/tests/mod.rs | 18 +- crates/project-model/src/build_scripts.rs | 25 +- crates/project-model/src/cargo_workspace.rs | 39 +- crates/project-model/src/project_json.rs | 3 +- crates/project-model/src/rustc_cfg.rs | 46 +- crates/project-model/src/sysroot.rs | 124 ++- .../project-model/src/target_data_layout.rs | 60 +- crates/project-model/src/tests.rs | 71 +- crates/project-model/src/workspace.rs | 549 ++++++----- .../cargo_hello_world_project_model.txt | 27 - ...project_model_with_selective_overrides.txt | 27 - ..._project_model_with_wildcard_overrides.txt | 27 - ...rust_project_hello_world_project_model.txt | 60 -- crates/rust-analyzer/src/bin/main.rs | 28 +- crates/rust-analyzer/src/bin/rustc_wrapper.rs | 19 +- crates/rust-analyzer/src/cargo_target_spec.rs | 2 +- .../rust-analyzer/src/cli/analysis_stats.rs | 216 ++++- crates/rust-analyzer/src/cli/flags.rs | 7 + crates/rust-analyzer/src/cli/scip.rs | 31 +- crates/rust-analyzer/src/config.rs | 19 +- crates/rust-analyzer/src/global_state.rs | 41 +- .../src/handlers/notification.rs | 22 +- crates/rust-analyzer/src/handlers/request.rs | 11 +- .../src/integrated_benchmarks.rs | 3 + crates/rust-analyzer/src/lib.rs | 4 +- 
crates/rust-analyzer/src/lsp/to_proto.rs | 673 ++++++++++++-- crates/rust-analyzer/src/main_loop.rs | 41 +- crates/rust-analyzer/src/reload.rs | 193 +++- crates/rust-analyzer/tests/crate_graph.rs | 118 +++ crates/rust-analyzer/tests/slow-tests/main.rs | 16 +- .../rust-analyzer/tests/slow-tests/support.rs | 9 +- crates/rust-analyzer/tests/slow-tests/tidy.rs | 21 - .../deduplication_crate_graph_A.json | 0 .../deduplication_crate_graph_B.json | 0 crates/salsa/src/doctest.rs | 115 --- crates/salsa/src/lib.rs | 1 - crates/syntax/rust.ungram | 6 +- crates/syntax/src/ast/edit_in_place.rs | 28 +- crates/syntax/src/ast/generated/nodes.rs | 37 +- crates/syntax/src/ast/make.rs | 2 +- crates/syntax/src/ast/node_ext.rs | 20 + crates/syntax/src/ast/prec.rs | 8 +- crates/syntax/src/lib.rs | 5 - crates/syntax/src/tests/ast_src.rs | 6 +- crates/test-fixture/src/lib.rs | 88 +- crates/test-utils/src/fixture.rs | 38 +- crates/test-utils/src/minicore.rs | 33 + crates/toolchain/src/lib.rs | 42 +- crates/tt/src/lib.rs | 1 + docs/user/generated_config.adoc | 17 +- editors/code/.vscodeignore | 3 + .../code/language-configuration-rustdoc.json | 37 + editors/code/package.json | 44 +- editors/code/rustdoc-inject.json | 93 ++ editors/code/rustdoc.json | 82 ++ editors/code/src/rust_project.ts | 19 + lib/lsp-server/LICENSE-APACHE | 1 + lib/lsp-server/LICENSE-MIT | 1 + xtask/src/metrics.rs | 2 - 178 files changed, 7101 insertions(+), 1965 deletions(-) create mode 100644 .github/rust.json create mode 100644 crates/hir/src/term_search.rs create mode 100644 crates/hir/src/term_search/expr.rs create mode 100644 crates/hir/src/term_search/tactics.rs create mode 100644 crates/ide-assists/src/handlers/term_search.rs create mode 100644 crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs create mode 100644 crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0209_become_expr.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0209_become_expr.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs create mode 100644 crates/rust-analyzer/tests/crate_graph.rs rename crates/{project-model => rust-analyzer/tests}/test_data/deduplication_crate_graph_A.json (100%) rename crates/{project-model => rust-analyzer/tests}/test_data/deduplication_crate_graph_B.json (100%) delete mode 100644 crates/salsa/src/doctest.rs create mode 100644 editors/code/language-configuration-rustdoc.json create mode 100644 editors/code/rustdoc-inject.json create mode 100644 editors/code/rustdoc.json create mode 120000 lib/lsp-server/LICENSE-APACHE create mode 120000 lib/lsp-server/LICENSE-MIT diff --git a/.github/rust.json b/.github/rust.json new file mode 100644 index 00000000000..ddaa1b0824b --- /dev/null +++ b/.github/rust.json @@ -0,0 +1,33 @@ +{ + "problemMatcher": [ + { + "owner": "rustfmt", + "severity": "warning", + "pattern": [ + { + "regexp": "^(Diff in (.+)) at line (\\d+):$", + "message": 1, + "file": 2, + "line": 3 + } + ] + }, + { 
+ "owner": "clippy", + "pattern": [ + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*(warning|warn|error)(?:\\x1b\\[[\\d;]+m)*(\\[(.*)\\])?(?:\\x1b\\[[\\d;]+m)*:(?:\\x1b\\[[\\d;]+m)* ([^\\x1b]*)(?:\\x1b\\[[\\d;]+m)*$", + "severity": 1, + "message": 4, + "code": 3 + }, + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*\\s*(?:\\x1b\\[[\\d;]+m)*\\s*--> (?:\\x1b\\[[\\d;]+m)*(.*):(\\d*):(\\d*)(?:\\x1b\\[[\\d;]+m)*$", + "file": 1, + "line": 2, + "column": 3 + } + ] + } + ] +} diff --git a/.github/workflows/autopublish.yaml b/.github/workflows/autopublish.yaml index 9a5015005b3..4b97637088c 100644 --- a/.github/workflows/autopublish.yaml +++ b/.github/workflows/autopublish.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 964be478fa3..62fbd57abc1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -27,7 +27,7 @@ jobs: typescript: ${{ steps.filter.outputs.typescript }} proc_macros: ${{ steps.filter.outputs.proc_macros }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242 id: filter with: @@ -56,7 +56,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} @@ -65,6 +65,10 @@ jobs: rustup update --no-self-update ${{ env.RUST_CHANNEL }} rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src rustup default ${{ env.RUST_CHANNEL }} + # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json + - name: Install Rust Problem Matcher + if: matrix.os == 'ubuntu-latest' + run: echo "::add-matcher::.github/rust.json" - name: Cache Dependencies uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894 @@ -107,6 +111,10 @@ jobs: if: matrix.os == 'windows-latest' run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr + - name: rustfmt + if: matrix.os == 'ubuntu-latest' + run: cargo fmt -- --check + # Weird targets to catch non-portable code rust-cross: if: github.repository == 'rust-lang/rust-analyzer' @@ -121,7 +129,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: | @@ -153,13 +161,13 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: needs.changes.outputs.typescript == 'true' - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 if: needs.changes.outputs.typescript == 'true' - name: Install xvfb diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 5af8aa1f77a..f88c7f95d5c 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 1 diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index e6a9917a0bf..be9f504e599 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -21,7 +21,7 @@ jobs: rustup component add rustfmt rust-src rustup default stable - name: Cache cargo - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | 
~/.cargo/bin/ @@ -36,10 +36,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -52,7 +52,7 @@ jobs: run: cargo xtask metrics build - name: Cache target - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target/ key: ${{ runner.os }}-target-${{ github.sha }} @@ -73,10 +73,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -86,7 +86,7 @@ jobs: key: ${{ runner.os }}-cargo-${{ github.sha }} - name: Restore target cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target/ key: ${{ runner.os }}-target-${{ github.sha }} @@ -106,7 +106,7 @@ jobs: needs: [build_metrics, other_metrics] steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download build metrics uses: actions/download-artifact@v3 diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml index 6d026c9ad91..862373ec1cc 100644 --- a/.github/workflows/publish-libs.yaml +++ b/.github/workflows/publish-libs.yaml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 9077a9ac21e..adb1c850516 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,7 +59,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -78,9 +78,9 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' @@ -154,7 +154,7 @@ jobs: run: apk add --no-cache git clang lld musl-dev nodejs npm - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -188,9 +188,9 @@ jobs: needs: ["dist", "dist-x86_64-unknown-linux-musl"] steps: - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV if: github.ref == 'refs/heads/release' @@ -199,7 +199,7 @@ jobs: - run: 'echo "TAG: $TAG"' - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml index 05f3e254e5f..12a1a791fda 100644 --- a/.github/workflows/rustdoc.yaml +++ b/.github/workflows/rustdoc.yaml @@ -17,7 +17,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: rustup update --no-self-update stable diff --git a/Cargo.lock b/Cargo.lock index dc2bf3a7694..7b29d7bb798 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1329,6 +1329,7 @@ dependencies = [ "paths", "proc-macro-api", "proc-macro-test", + "ra-ap-rustc_lexer", "span", "stdx", "tt", @@ -1470,12 +1471,12 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index" -version = "0.36.0" +version = "0.37.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8a41dee58608b1fc93779ea365edaa70ac9927e3335ae914b675be0fa063cd7" +checksum = "df5a0ba0d08af366cf235dbe8eb7226cced7a4fe502c98aa434ccf416defd746" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros 0.36.0", + "ra-ap-rustc_index_macros 0.37.0", "smallvec", ] @@ -1493,9 +1494,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbfe98def54c4337a2f7d8233850bd5d5349972b185fe8a0db2b979164b30ed8" +checksum = "1971ebf9a701e0e68387c264a32517dcb4861ad3a4862f2e2803c1121ade20d5" dependencies = [ "proc-macro2", "quote", @@ -1525,11 +1526,11 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5529bffec7530b4a3425640bfdfd9b95d87c4c620f740266c0de6572561aab4" +checksum = "2c3c0e7ca9c5bdc66e3b590688e237a22ac47a48e4eac7f46b05b2abbfaf0abd" dependencies = [ - "ra-ap-rustc_index 0.36.0", + "ra-ap-rustc_index 0.37.0", "rustc-hash", "rustc_apfloat", "smallvec", diff --git a/Cargo.toml b/Cargo.toml index 2b81f7b11b2..49c7d369190 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -84,7 +84,7 @@ ra-ap-rustc_lexer = { version = "0.35.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false } ra-ap-rustc_index = { version = "0.35.0", default-features = false } ra-ap-rustc_abi = { version = "0.35.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.36.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.37.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. sourcegen = { path = "./crates/sourcegen" } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 9560826e373..a817cd0c3ac 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -11,7 +11,6 @@ use std::{fmt, mem, ops, str::FromStr}; use cfg::CfgOptions; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::{FxHashMap, FxHashSet}; -use semver::Version; use syntax::SmolStr; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; @@ -243,6 +242,7 @@ impl CrateDisplayName { CrateDisplayName { crate_name, canonical_name } } } + pub type TargetLayoutLoadResult = Result, Arc>; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] @@ -291,71 +291,6 @@ pub struct CrateData { pub dependencies: Vec, pub origin: CrateOrigin, pub is_proc_macro: bool, - // FIXME: These things should not be per crate! These are more per workspace crate graph level - // things. This info does need to be somewhat present though as to prevent deduplication from - // happening across different workspaces with different layouts. - pub target_layout: TargetLayoutLoadResult, - pub toolchain: Option, -} - -impl CrateData { - /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value. - pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { - // This method has some obscure bits. These are mostly there to be compliant with - // some patches. References to the patches are given. 
- if self.root_file_id != other.root_file_id { - return false; - } - - if self.display_name != other.display_name { - return false; - } - - if self.is_proc_macro != other.is_proc_macro { - return false; - } - - if self.edition != other.edition { - return false; - } - - if self.version != other.version { - return false; - } - - let mut opts = self.cfg_options.difference(&other.cfg_options); - if let Some(it) = opts.next() { - // Don't care if rust_analyzer CfgAtom is the only cfg in the difference set of self's and other's cfgs. - // https://github.com/rust-lang/rust-analyzer/blob/0840038f02daec6ba3238f05d8caa037d28701a0/crates/project-model/src/workspace.rs#L894 - if it.to_string() != "rust_analyzer" { - return false; - } - - if opts.next().is_some() { - return false; - } - } - - if self.env != other.env { - return false; - } - - let slf_deps = self.dependencies.iter(); - let other_deps = other.dependencies.iter(); - - if ignore_dev_deps { - return slf_deps - .clone() - .filter(|it| it.kind != DependencyKind::Dev) - .eq(other_deps.clone().filter(|it| it.kind != DependencyKind::Dev)); - } - - slf_deps.eq(other_deps) - } - - pub fn channel(&self) -> Option { - self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) - } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -398,32 +333,22 @@ pub enum DependencyKind { pub struct Dependency { pub crate_id: CrateId, pub name: CrateName, - kind: DependencyKind, prelude: bool, } impl Dependency { - pub fn new(name: CrateName, crate_id: CrateId, kind: DependencyKind) -> Self { - Self { name, crate_id, prelude: true, kind } + pub fn new(name: CrateName, crate_id: CrateId) -> Self { + Self { name, crate_id, prelude: true } } - pub fn with_prelude( - name: CrateName, - crate_id: CrateId, - prelude: bool, - kind: DependencyKind, - ) -> Self { - Self { name, crate_id, prelude, kind } + pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self { + Self { name, crate_id, prelude } } /// Whether this dependency is to be added to the depending crate's extern prelude. pub fn is_prelude(&self) -> bool { self.prelude } - - pub fn kind(&self) -> DependencyKind { - self.kind - } } impl CrateGraph { @@ -438,8 +363,6 @@ impl CrateGraph { env: Env, is_proc_macro: bool, origin: CrateOrigin, - target_layout: Result, Arc>, - toolchain: Option, ) -> CrateId { let data = CrateData { root_file_id, @@ -451,9 +374,7 @@ impl CrateGraph { env, dependencies: Vec::new(), origin, - target_layout, is_proc_macro, - toolchain, }; self.arena.alloc(data) } @@ -523,6 +444,10 @@ impl CrateGraph { self.arena.is_empty() } + pub fn len(&self) -> usize { + self.arena.len() + } + pub fn iter(&self) -> impl Iterator + '_ { self.arena.iter().map(|(idx, _)| idx) } @@ -623,13 +548,17 @@ impl CrateGraph { /// /// This will deduplicate the crates of the graph where possible. /// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id. - /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also have the crate dependencies sorted. + /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also + /// have the crate dependencies sorted. + /// + /// Returns a mapping from `other`'s crate ids to the new crate ids in `self`. 
pub fn extend( &mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths, - on_finished: impl FnOnce(&FxHashMap), - ) { + merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool, + ) -> FxHashMap { + let m = self.len(); let topo = other.crates_in_topological_order(); let mut id_map: FxHashMap = FxHashMap::default(); for topo in topo { @@ -637,51 +566,21 @@ impl CrateGraph { crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); crate_data.dependencies.sort_by_key(|dep| dep.crate_id); - let res = self.arena.iter().find_map(|(id, data)| { - match (&data.origin, &crate_data.origin) { - (a, b) if a == b => { - if data.eq_ignoring_origin_and_deps(crate_data, false) { - return Some((id, false)); - } - } - (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. }) - | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => { - // If the origins differ, check if the two crates are equal without - // considering the dev dependencies, if they are, they most likely are in - // different loaded workspaces which may cause issues. We keep the local - // version and discard the library one as the local version may have - // dev-dependencies that we want to keep resolving. See #15656 for more - // information. - if data.eq_ignoring_origin_and_deps(crate_data, true) { - return Some((id, !a.is_local())); - } - } - (_, _) => return None, - } + let res = self + .arena + .iter_mut() + .take(m) + .find_map(|(id, data)| merge((id, data), (topo, &crate_data)).then_some(id)); - None - }); - - if let Some((res, should_update_lib_to_local)) = res { - id_map.insert(topo, res); - if should_update_lib_to_local { - assert!(self.arena[res].origin.is_lib()); - assert!(crate_data.origin.is_local()); - self.arena[res].origin = crate_data.origin.clone(); - - // Move local's dev dependencies into the newly-local-formerly-lib crate. 
- self.arena[res].dependencies = crate_data.dependencies.clone(); - } - } else { - let id = self.arena.alloc(crate_data.clone()); - id_map.insert(topo, id); - } + let new_id = + if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) }; + id_map.insert(topo, new_id); } *proc_macros = mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect(); - on_finished(&id_map); + id_map } fn find_path( @@ -719,11 +618,9 @@ impl CrateGraph { match (cfg_if, std) { (Some(cfg_if), Some(std)) => { self.arena[cfg_if].dependencies.clear(); - self.arena[std].dependencies.push(Dependency::new( - CrateName::new("cfg_if").unwrap(), - cfg_if, - DependencyKind::Normal, - )); + self.arena[std] + .dependencies + .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if)); true } _ => false, @@ -871,7 +768,7 @@ impl fmt::Display for CyclicDependenciesError { #[cfg(test)] mod tests { - use crate::{CrateOrigin, DependencyKind}; + use crate::CrateOrigin; use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; @@ -888,8 +785,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -901,8 +796,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -914,26 +807,15 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); assert!(graph - .add_dep( - crate3, - Dependency::new(CrateName::new("crate1").unwrap(), crate1, DependencyKind::Normal) - ) + .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1,)) .is_err()); } @@ -950,8 +832,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -963,20 +843,12 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_err()); } @@ -993,8 +865,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -1006,8 +876,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -1019,20 +887,12 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - 
Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); } @@ -1049,8 +909,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -1062,26 +920,16 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph .add_dep( crate1, - Dependency::new( - CrateName::normalize_dashes("crate-name-with-dashes"), - crate2, - DependencyKind::Normal - ) + Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2,) ) .is_ok()); assert_eq!( graph[crate1].dependencies, - vec![Dependency::new( - CrateName::new("crate_name_with_dashes").unwrap(), - crate2, - DependencyKind::Normal - )] + vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)] ); } } diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index d7fc9d4c95c..cb2e6cdaa28 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -62,6 +62,20 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// The crate graph. #[salsa::input] fn crate_graph(&self) -> Arc; + + // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query + #[salsa::input] + fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult; + + #[salsa::input] + fn toolchain(&self, krate: CrateId) -> Option; + + #[salsa::transparent] + fn toolchain_channel(&self, krate: CrateId) -> Option; +} + +fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option { + db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) } fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse { diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index c59aff2a8bb..ee39a2790bc 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -14,7 +14,7 @@ use std::{ use command_group::{CommandGroup, GroupChild}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; -use paths::AbsPathBuf; +use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use serde::Deserialize; use stdx::process::streaming_output; @@ -23,6 +23,7 @@ pub use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion, }; +use toolchain::Tool; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub enum InvocationStrategy { @@ -89,9 +90,10 @@ impl FlycheckHandle { id: usize, sender: Box, config: FlycheckConfig, + sysroot_root: Option, workspace_root: AbsPathBuf, ) -> FlycheckHandle { - let actor = FlycheckActor::new(id, sender, config, workspace_root); + let actor = FlycheckActor::new(id, sender, config, sysroot_root, workspace_root); let (sender, receiver) = unbounded::(); let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) .name("Flycheck".to_owned()) @@ -101,13 +103,15 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker to do a workspace wide check. 
- pub fn restart_workspace(&self) { - self.sender.send(StateChange::Restart(None)).unwrap(); + pub fn restart_workspace(&self, saved_file: Option) { + self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. pub fn restart_for_package(&self, package: String) { - self.sender.send(StateChange::Restart(Some(package))).unwrap(); + self.sender + .send(StateChange::Restart { package: Some(package), saved_file: None }) + .unwrap(); } /// Stop this cargo check worker. @@ -158,7 +162,7 @@ pub enum Progress { } enum StateChange { - Restart(Option), + Restart { package: Option, saved_file: Option }, Cancel, } @@ -171,6 +175,7 @@ struct FlycheckActor { /// Either the workspace root of the workspace we are flychecking, /// or the project root of the project. root: AbsPathBuf, + sysroot_root: Option, /// CargoHandle exists to wrap around the communication needed to be able to /// run `cargo check` without blocking. Currently the Rust standard library /// doesn't provide a way to read sub-process output without blocking, so we @@ -184,15 +189,25 @@ enum Event { CheckEvent(Option), } +const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; + impl FlycheckActor { fn new( id: usize, sender: Box, config: FlycheckConfig, + sysroot_root: Option, workspace_root: AbsPathBuf, ) -> FlycheckActor { tracing::info!(%id, ?workspace_root, "Spawning flycheck"); - FlycheckActor { id, sender, config, root: workspace_root, command_handle: None } + FlycheckActor { + id, + sender, + config, + sysroot_root, + root: workspace_root, + command_handle: None, + } } fn report_progress(&self, progress: Progress) { @@ -218,7 +233,7 @@ impl FlycheckActor { tracing::debug!(flycheck_id = self.id, "flycheck cancelled"); self.cancel_check_process(); } - Event::RequestStateChange(StateChange::Restart(package)) => { + Event::RequestStateChange(StateChange::Restart { package, saved_file }) => { // Cancel the previously spawned process self.cancel_check_process(); while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { @@ -228,7 +243,11 @@ impl FlycheckActor { } } - let command = self.check_command(package.as_deref()); + let command = + match self.check_command(package.as_deref(), saved_file.as_deref()) { + Some(c) => c, + None => continue, + }; let formatted_command = format!("{:?}", command); tracing::debug!(?command, "will restart flycheck"); @@ -302,7 +321,14 @@ impl FlycheckActor { } } - fn check_command(&self, package: Option<&str>) -> Command { + /// Construct a `Command` object for checking the user's code. If the user + /// has specified a custom command with placeholders that we cannot fill, + /// return None. 
+ fn check_command( + &self, + package: Option<&str>, + saved_file: Option<&AbsPath>, + ) -> Option { let (mut cmd, args) = match &self.config { FlycheckConfig::CargoCommand { command, @@ -316,7 +342,10 @@ impl FlycheckActor { ansi_color_output, target_dir, } => { - let mut cmd = Command::new(toolchain::cargo()); + let mut cmd = Command::new(Tool::Cargo.path()); + if let Some(sysroot_root) = &self.sysroot_root { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); + } cmd.arg(command); cmd.current_dir(&self.root); @@ -355,7 +384,7 @@ impl FlycheckActor { cmd.arg("--target-dir").arg(target_dir); } cmd.envs(extra_env); - (cmd, extra_args) + (cmd, extra_args.clone()) } FlycheckConfig::CustomCommand { command, @@ -384,12 +413,34 @@ impl FlycheckActor { } } - (cmd, args) + if args.contains(&SAVED_FILE_PLACEHOLDER.to_owned()) { + // If the custom command has a $saved_file placeholder, and + // we're saving a file, replace the placeholder in the arguments. + if let Some(saved_file) = saved_file { + let args = args + .iter() + .map(|arg| { + if arg == SAVED_FILE_PLACEHOLDER { + saved_file.to_string() + } else { + arg.clone() + } + }) + .collect(); + (cmd, args) + } else { + // The custom command has a $saved_file placeholder, + // but we had an IDE event that wasn't a file save. Do nothing. + return None; + } + } else { + (cmd, args.clone()) + } } }; cmd.args(args); - cmd + Some(cmd) } fn send(&self, check_task: Message) { diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index c91a5497262..519706c65f2 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -377,27 +377,39 @@ impl AttrsWithOwner { AttrDefId::GenericParamId(it) => match it { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should be never getting `None` here. + match src.value.get(it.local_id()) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } GenericParamId::TypeParamId(it) => { let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should be never getting `None` here. + match src.value.get(it.local_id()) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should be never getting `None` here. 
+ match src.value.get(it.local_id) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 29ac666277d..5dc5fedd230 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -416,6 +416,11 @@ impl ExprCollector<'_> { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Return { expr }, syntax_ptr) } + ast::Expr::BecomeExpr(e) => { + let expr = + e.expr().map(|e| self.collect_expr(e)).unwrap_or_else(|| self.missing_expr()); + self.alloc_expr(Expr::Become { expr }, syntax_ptr) + } ast::Expr::YieldExpr(e) => { self.is_lowering_coroutine = true; let expr = e.expr().map(|e| self.collect_expr(e)); @@ -1000,10 +1005,6 @@ impl ExprCollector<'_> { krate: *krate, }); } - Some(ExpandError::RecursionOverflowPoisoned) => { - // Recursion limit has been reached in the macro expansion tree, but not in - // this very macro call. Don't add diagnostics to avoid duplication. - } Some(err) => { self.source_map.diagnostics.push(BodyDiagnostic::MacroError { node: InFile::new(outer_file, syntax_ptr), @@ -1112,7 +1113,7 @@ impl ExprCollector<'_> { statements.push(Statement::Expr { expr, has_semi }); } } - ast::Stmt::Item(_item) => (), + ast::Stmt::Item(_item) => statements.push(Statement::Item), } } diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 4afb4086517..7007dea638e 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -261,6 +261,11 @@ impl Printer<'_> { self.print_expr(*expr); } } + Expr::Become { expr } => { + w!(self, "become"); + self.whitespace(); + self.print_expr(*expr); + } Expr::Yield { expr } => { w!(self, "yield"); if let Some(expr) = expr { @@ -623,6 +628,7 @@ impl Printer<'_> { } wln!(self); } + Statement::Item => (), } } diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index ab623250d40..69b82ae871a 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -197,6 +197,7 @@ fn compute_block_scopes( Statement::Expr { expr, .. } => { compute_expr_scopes(*expr, body, scopes, scope); } + Statement::Item => (), } } if let Some(expr) = tail { diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 7ce05b64d02..f506864902c 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -634,7 +634,6 @@ impl<'a> AssocItemCollector<'a> { attr, ) { Ok(ResolvedAttr::Macro(call_id)) => { - self.attr_calls.push((ast_id, call_id)); // If proc attribute macro expansion is disabled, skip expanding it here if !self.db.expand_proc_attr_macros() { continue 'attrs; @@ -647,10 +646,21 @@ impl<'a> AssocItemCollector<'a> { // disabled. This is analogous to the handling in // `DefCollector::collect_macros`. 
if exp.is_dummy() { + self.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + self.module_id.local_id, + loc.kind, + loc.def.krate, + )); + + continue 'attrs; + } + if exp.is_disabled() { continue 'attrs; } } + self.attr_calls.push((ast_id, call_id)); + let res = self.expander.enter_expand_id::(self.db, call_id); self.collect_macro_items(res, &|| loc.kind.clone()); diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index b83feeedc34..b99df1ed593 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -140,13 +140,11 @@ impl Expander { // The overflow error should have been reported when it occurred (see the next branch), // so don't return overflow error here to avoid diagnostics duplication. cov_mark::hit!(overflow_but_not_me); - return ExpandResult::only_err(ExpandError::RecursionOverflowPoisoned); + return ExpandResult::ok(None); } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { self.recursion_depth = u32::MAX; cov_mark::hit!(your_stack_belongs_to_me); - return ExpandResult::only_err(ExpandError::other( - "reached recursion limit during macro expansion", - )); + return ExpandResult::only_err(ExpandError::RecursionOverflow); } let ExpandResult { value, err } = op(self); diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 2e137f67b4c..26247ba5b50 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -447,18 +447,25 @@ fn select_best_path( } const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; - let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| { - let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude); - let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude); + let choose = |new: (ModPath, _), old: (ModPath, _)| { + let (new_path, _) = &new; + let (old_path, _) = &old; + let new_has_prelude = new_path.segments().iter().any(|seg| seg == &known::prelude); + let old_has_prelude = old_path.segments().iter().any(|seg| seg == &known::prelude); match (new_has_prelude, old_has_prelude, prefer_prelude) { - (true, false, true) | (false, true, false) => new_path, - (true, false, false) | (false, true, true) => old_path, - // no prelude difference in the paths, so pick the smaller one + (true, false, true) | (false, true, false) => new, + (true, false, false) | (false, true, true) => old, + // no prelude difference in the paths, so pick the shorter one (true, true, _) | (false, false, _) => { - if new_path.0.len() < old_path.0.len() { - new_path + let new_path_is_shorter = new_path + .len() + .cmp(&old_path.len()) + .then_with(|| new_path.textual_len().cmp(&old_path.textual_len())) + .is_lt(); + if new_path_is_shorter { + new } else { - old_path + old } } } @@ -469,8 +476,8 @@ fn select_best_path( let rank = match prefer_no_std { false => |name: &Name| match name { name if name == &known::core => 0, - name if name == &known::alloc => 0, - name if name == &known::std => 1, + name if name == &known::alloc => 1, + name if name == &known::std => 2, _ => unreachable!(), }, true => |name: &Name| match name { @@ -1539,4 +1546,38 @@ pub mod foo { "krate::prelude::Foo", ); } + + #[test] + fn respect_segment_length() { + check_found_path( + r#" +//- /main.rs crate:main deps:petgraph +$0 +//- /petgraph.rs crate:petgraph +pub mod graph { + pub use crate::graph_impl::{ + NodeIndex + }; +} + +mod graph_impl { + pub struct NodeIndex(Ix); +} + +pub mod stable_graph { + 
#[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} + +pub mod prelude { + #[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} +"#, + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + ); + } } diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index ac44d379415..34b2910b4f5 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -182,6 +182,7 @@ pub enum Expr { tail: Option, }, Const(ConstBlockId), + // FIXME: Fold this into Block with an unsafe flag? Unsafe { id: Option, statements: Box<[Statement]>, @@ -216,6 +217,9 @@ pub enum Expr { Return { expr: Option, }, + Become { + expr: ExprId, + }, Yield { expr: Option, }, @@ -349,6 +353,9 @@ pub enum Statement { expr: ExprId, has_semi: bool, }, + // At the moment, we only use this to figure out if a return expression + // is really the last statement of a block. See #16566 + Item, } impl Expr { @@ -382,6 +389,7 @@ impl Expr { } } Statement::Expr { expr: expression, .. } => f(*expression), + Statement::Item => (), } } if let &Some(expr) = tail { @@ -410,6 +418,7 @@ impl Expr { f(expr); } } + Expr::Become { expr } => f(*expr), Expr::RecordLit { fields, spread, .. } => { for field in fields.iter() { f(field.expr); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs index 0909d8c8354..63f211022c9 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs @@ -33,7 +33,7 @@ m!(&k"); "#, expect![[r#" macro_rules! m { ($i:literal) => {}; } -/* error: invalid token tree */"#]], +/* error: mismatched delimiters */"#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs index e875950e4e5..2d289b76833 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs @@ -68,26 +68,26 @@ m2!(); "#, expect![[r#" macro_rules! i1 { invalid } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e1 { $i:ident => () } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e2 { ($i:ident) () } -/* error: invalid macro definition: expected `=` */ +/* error: macro definition has parse errors */ macro_rules! e3 { ($(i:ident)_) => () } -/* error: invalid macro definition: invalid repeat */ +/* error: macro definition has parse errors */ macro_rules! f1 { ($i) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f2 { ($i:) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f3 { ($i:_) => () } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! m1 { ($$i) => () } -/* error: invalid macro definition: `$$` is not allowed on the pattern side */ +/* error: macro definition has parse errors */ macro_rules! m2 { () => ( ${invalid()} ) } -/* error: invalid macro definition: invalid metavariable expression */ +/* error: macro definition has parse errors */ "#]], ) } @@ -137,18 +137,18 @@ macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; } macro_rules! 
mA { ($($($($i:ident)+)?)*) => {}; } macro_rules! mB { ($($($($i:ident)+)*)?) => {}; } -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index 6560d0ec466..bf701198387 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -275,9 +275,9 @@ macro_rules! depth_too_large { } fn test() { - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; } "#]], ); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 6717ee1aa5f..4aad53c3bd7 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -1090,3 +1090,57 @@ fn main() { "#]], ); } + +#[test] +fn regression_16529() { + check( + r#" +mod any { + #[macro_export] + macro_rules! nameable { + { + struct $name:ident[$a:lifetime] + } => { + $crate::any::nameable! { + struct $name[$a] + a + } + }; + { + struct $name:ident[$a:lifetime] + a + } => {}; + } + pub use nameable; + + nameable! { + Name['a] + } +} +"#, + expect![[r#" +mod any { + #[macro_export] + macro_rules! nameable { + { + struct $name:ident[$a:lifetime] + } => { + $crate::any::nameable! { + struct $name[$a] + a + } + }; + { + struct $name:ident[$a:lifetime] + a + } => {}; + } + pub use nameable; + + /* error: unexpected token in input */$crate::any::nameable! 
{ + struct $name[$a]a + } +} +"#]], + ); +} diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index ae56934f632..362c189f6a7 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -97,8 +97,8 @@ m2!(x macro_rules! m1 { ($x:ident) => { ($x } } macro_rules! m2 { ($x:ident) => {} } -/* error: invalid macro definition: expected subtree */ -/* error: invalid token tree */ +/* error: macro definition has parse errors */ +/* error: mismatched delimiters */ "#]], ) } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index fc5a6e80a42..23b10cfd8e6 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -58,6 +58,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream name: "identity_when_valid".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), + disabled: false, }, )]; let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 21cc28f1b3d..88838f58fe7 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -11,7 +11,7 @@ use either::Either; use hir_expand::{ ast_id_map::FileAstId, attrs::{Attr, AttrId}, - builtin_attr_macro::find_builtin_attr, + builtin_attr_macro::{find_builtin_attr, BuiltinAttrExpander}, builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, name::{name, AsName, Name}, @@ -98,9 +98,13 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI }; ( name.as_name(), - CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId( - idx as u32, - )), + if it.disabled { + CustomProcMacroExpander::disabled() + } else { + CustomProcMacroExpander::new( + hir_expand::proc_macro::ProcMacroId::new(idx as u32), + ) + }, ) }) .collect()) @@ -604,9 +608,6 @@ impl DefCollector<'_> { id: ItemTreeId, fn_id: FunctionId, ) { - if self.def_map.block.is_some() { - return; - } let kind = def.kind.to_basedb_kind(); let (expander, kind) = match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { @@ -1120,9 +1121,16 @@ impl DefCollector<'_> { let mut push_resolved = |directive: &MacroDirective, call_id| { resolved.push((directive.module_id, directive.depth, directive.container, call_id)); }; + + #[derive(PartialEq, Eq)] + enum Resolved { + Yes, + No, + } + let mut res = ReachedFixedPoint::Yes; // Retain unresolved macros after this round of resolution. - macros.retain(|directive| { + let mut retain = |directive: &MacroDirective| { let subns = match &directive.kind { MacroDirectiveKind::FnLike { .. } => MacroSubNs::Bang, MacroDirectiveKind::Attr { .. } | MacroDirectiveKind::Derive { .. 
} => { @@ -1156,10 +1164,11 @@ impl DefCollector<'_> { self.def_map.modules[directive.module_id] .scope .add_macro_invoc(ast_id.ast_id, call_id); + push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => { @@ -1198,7 +1207,7 @@ impl DefCollector<'_> { push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => { @@ -1221,7 +1230,7 @@ impl DefCollector<'_> { } .collect(&[*mod_item], directive.container); res = ReachedFixedPoint::No; - false + Resolved::Yes }; if let Some(ident) = path.as_ident() { @@ -1237,13 +1246,18 @@ impl DefCollector<'_> { let def = match resolver_def_id(path.clone()) { Some(def) if def.is_attribute() => def, - _ => return true, + _ => return Resolved::No, }; - if matches!( - def, - MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. } - if expander.is_derive() - ) { + + if let MacroDefId { + kind: + MacroDefKind::BuiltInAttr( + BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst, + _, + ), + .. + } = def + { // Resolved to `#[derive]`, we don't actually expand this attribute like // normal (as that would just be an identity expansion with extra output) // Instead we treat derive attributes special and apply them separately. @@ -1316,16 +1330,6 @@ impl DefCollector<'_> { let call_id = attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); - // If proc attribute macro expansion is disabled, skip expanding it here - if !self.db.expand_proc_attr_macros() { - self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( - directive.module_id, - self.db.lookup_intern_macro_call(call_id).kind, - def.krate, - )); - return recollect_without(self); - } - // Skip #[test]/#[bench] expansion, which would merely result in more memory usage // due to duplicating functions into macro expansions if matches!( @@ -1337,17 +1341,29 @@ impl DefCollector<'_> { } if let MacroDefKind::ProcMacro(exp, ..) = def.kind { - if exp.is_dummy() { - // If there's no expander for the proc macro (e.g. - // because proc macros are disabled, or building the - // proc macro crate failed), report this and skip - // expansion like we would if it was disabled + // If proc attribute macro expansion is disabled, skip expanding it here + if !self.db.expand_proc_attr_macros() { self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( directive.module_id, self.db.lookup_intern_macro_call(call_id).kind, def.krate, )); + return recollect_without(self); + } + // If there's no expander for the proc macro (e.g. + // because proc macros are disabled, or building the + // proc macro crate failed), report this and skip + // expansion like we would if it was disabled + if exp.is_dummy() { + self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + directive.module_id, + self.db.lookup_intern_macro_call(call_id).kind, + def.krate, + )); + return recollect_without(self); + } + if exp.is_disabled() { return recollect_without(self); } } @@ -1358,12 +1374,13 @@ impl DefCollector<'_> { push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } - true - }); + Resolved::No + }; + macros.retain(|it| retain(it) == Resolved::No); // Attribute resolution can add unresolved macro invocations, so concatenate the lists. 
macros.extend(mem::take(&mut self.unresolved_macros)); self.unresolved_macros = macros; @@ -1673,7 +1690,11 @@ impl ModCollector<'_, '_> { FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); - if self.def_collector.is_proc_macro && self.module_id == DefMap::ROOT { + + if self.def_collector.def_map.block.is_none() + && self.def_collector.is_proc_macro + && self.module_id == DefMap::ROOT + { if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { self.def_collector.export_proc_macro( proc_macro, @@ -2333,7 +2354,7 @@ impl ModCollector<'_, '_> { resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, ) { - // FIXME: if there were errors, this mightve been in the eager expansion from an + // FIXME: if there were errors, this might've been in the eager expansion from an // unresolved macro, so we need to push this into late macro resolution. see fixme above if res.err.is_none() { // Legacy macros need to be expanded immediately, so that any macros they produce diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 0a3f7bf7ec3..161b2c05990 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -103,6 +103,9 @@ impl DefDiagnostic { } // FIXME: Whats the difference between this and unresolved_macro_call + // FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc + // yet the diagnostic handler in ide-diagnostics has to figure out what happened because this + // struct loses all that information! pub(crate) fn unresolved_proc_macro( container: LocalModuleId, ast: MacroCallKind, diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 6d3de0e55d2..90cd3af7578 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -446,7 +446,7 @@ fn compile_error_expand( ) -> ExpandResult { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { - Some(unquoted) => ExpandError::other(unquoted), + Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()), None => ExpandError::other("`compile_error!` argument must be a string"), }, _ => ExpandError::other("`compile_error!` argument must be a string"), diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs index 67b7df198e9..c6611438e64 100644 --- a/crates/hir-expand/src/change.rs +++ b/crates/hir-expand/src/change.rs @@ -1,6 +1,10 @@ //! Defines a unit of change that can applied to the database to get the next //! state. Changes are transactional. 
-use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot}; +use base_db::{ + salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot, + TargetLayoutLoadResult, Version, +}; +use la_arena::RawIdx; use span::FileId; use triomphe::Arc; @@ -10,6 +14,8 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros}; pub struct Change { pub source_change: FileChange, pub proc_macros: Option, + pub toolchains: Option>>, + pub target_data_layouts: Option>, } impl Change { @@ -22,6 +28,24 @@ impl Change { if let Some(proc_macros) = self.proc_macros { db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } + if let Some(target_data_layouts) = self.target_data_layouts { + for (id, val) in target_data_layouts.into_iter().enumerate() { + db.set_data_layout_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } + if let Some(toolchains) = self.toolchains { + for (id, val) in toolchains.into_iter().enumerate() { + db.set_toolchain_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } } pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { @@ -36,6 +60,14 @@ impl Change { self.proc_macros = Some(proc_macros); } + pub fn set_toolchains(&mut self, toolchains: Vec>) { + self.toolchains = Some(toolchains); + } + + pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec) { + self.target_data_layouts = Some(target_data_layouts); + } + pub fn set_roots(&mut self, roots: Vec) { self.source_change.set_roots(roots) } diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 6a288cf9197..7b62eaa0289 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -108,7 +108,7 @@ pub trait ExpandDatabase: SourceDatabase { fn macro_arg( &self, id: MacroCallId, - ) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>>; + ) -> ValueResult<(Arc, SyntaxFixupUndoInfo), Arc>>; /// Fetches the expander for this macro. #[salsa::transparent] #[salsa::invoke(TokenExpander::macro_expander)] @@ -326,58 +326,77 @@ fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, // FIXME: consider the following by putting fixup info into eager call info args - // ) -> ValueResult>, Arc>> { -) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>> { - let mismatched_delimiters = |arg: &SyntaxNode| { - let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); - let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); - let well_formed_tt = - matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); - if !well_formed_tt { - // Don't expand malformed (unbalanced) macro invocations. This is - // less than ideal, but trying to expand unbalanced macro calls - // sometimes produces pathological, deeply nested code which breaks - // all kinds of things. - // - // Some day, we'll have explicit recursion counters for all - // recursive things, at which point this code might be removed. - cov_mark::hit!(issue9358_bad_macro_stack_overflow); - Some(Arc::new(Box::new([SyntaxError::new( - "unbalanced token tree".to_owned(), - arg.text_range(), - )]) as Box<[_]>)) - } else { - None - } - }; + // ) -> ValueResult, Arc>> { +) -> ValueResult<(Arc, SyntaxFixupUndoInfo), Arc>> { let loc = db.lookup_intern_macro_call(id); if let Some(EagerCallInfo { arg, .. 
}) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) .then(|| loc.eager.as_deref()) .flatten() { - ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE))) + ValueResult::ok((arg.clone(), SyntaxFixupUndoInfo::NONE)) } else { let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); let syntax = match loc.kind { MacroCallKind::FnLike { ast_id, .. } => { + let dummy_tt = |kind| { + ( + Arc::new(tt::Subtree { + delimiter: tt::Delimiter { + open: loc.call_site, + close: loc.call_site, + kind, + }, + token_trees: Box::default(), + }), + SyntaxFixupUndoInfo::default(), + ) + }; + let node = &ast_id.to_ptr(db).to_node(&root); let offset = node.syntax().text_range().start(); - match node.token_tree() { - Some(tt) => { - let tt = tt.syntax(); - if let Some(e) = mismatched_delimiters(tt) { - return ValueResult::only_err(e); - } - tt.clone() - } - None => { - return ValueResult::only_err(Arc::new(Box::new([ - SyntaxError::new_at_offset("missing token tree".to_owned(), offset), - ]))); - } + let Some(tt) = node.token_tree() else { + return ValueResult::new( + dummy_tt(tt::DelimiterKind::Invisible), + Arc::new(Box::new([SyntaxError::new_at_offset( + "missing token tree".to_owned(), + offset, + )])), + ); + }; + let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']); + let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]); + + let mismatched_delimiters = !matches!( + (first, last), + (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) + ); + if mismatched_delimiters { + // Don't expand malformed (unbalanced) macro invocations. This is + // less than ideal, but trying to expand unbalanced macro calls + // sometimes produces pathological, deeply nested code which breaks + // all kinds of things. + // + // So instead, we'll return an empty subtree here + cov_mark::hit!(issue9358_bad_macro_stack_overflow); + + let kind = match first { + _ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible, + T!['('] => tt::DelimiterKind::Parenthesis, + T!['['] => tt::DelimiterKind::Bracket, + T!['{'] => tt::DelimiterKind::Brace, + _ => tt::DelimiterKind::Invisible, + }; + return ValueResult::new( + dummy_tt(kind), + Arc::new(Box::new([SyntaxError::new_at_offset( + "mismatched delimiters".to_owned(), + offset, + )])), + ); } + tt.syntax().clone() } MacroCallKind::Derive { ast_id, .. } => { ast_id.to_ptr(db).to_node(&root).syntax().clone() @@ -427,15 +446,15 @@ fn macro_arg( if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { match parse.errors() { - [] => ValueResult::ok(Some((Arc::new(tt), undo_info))), + [] => ValueResult::ok((Arc::new(tt), undo_info)), errors => ValueResult::new( - Some((Arc::new(tt), undo_info)), + (Arc::new(tt), undo_info), // Box::<[_]>::from(res.errors()), not stable yet Arc::new(errors.to_vec().into_boxed_slice()), ), } } else { - ValueResult::ok(Some((Arc::new(tt), undo_info))) + ValueResult::ok((Arc::new(tt), undo_info)) } } } @@ -519,21 +538,20 @@ fn macro_expand( expander.expand(db, macro_call_id, &node, map.as_ref()) } _ => { - let ValueResult { value, err } = db.macro_arg(macro_call_id); - let Some((macro_arg, undo_info)) = value else { - return ExpandResult { - value: CowArc::Owned(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! 
- err: Some(ExpandError::other("invalid token tree")), - }; + let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id); + let format_parse_err = |err: Arc>| { + let mut buf = String::new(); + for err in &**err { + use std::fmt::Write; + _ = write!(buf, "{}, ", err); + } + buf.pop(); + buf.pop(); + ExpandError::other(buf) }; let arg = &*macro_arg; - match loc.def.kind { + let res = match loc.def.kind { MacroDefKind::Declarative(id) => { db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) } @@ -549,16 +567,7 @@ fn macro_expand( MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { return ExpandResult { value: CowArc::Arc(macro_arg.clone()), - err: err.map(|err| { - let mut buf = String::new(); - for err in &**err { - use std::fmt::Write; - _ = write!(buf, "{}, ", err); - } - buf.pop(); - buf.pop(); - ExpandError::other(buf) - }), + err: err.map(format_parse_err), }; } MacroDefKind::BuiltInEager(it, _) => { @@ -570,6 +579,11 @@ fn macro_expand( res } _ => unreachable!(), + }; + ExpandResult { + value: res.value, + // if the arg had parse errors, show them instead of the expansion errors + err: err.map(format_parse_err).or(res.err), } } }; @@ -597,17 +611,7 @@ fn macro_expand( fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { let loc = db.lookup_intern_macro_call(id); - let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { - return ExpandResult { - value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! - err: Some(ExpandError::other("invalid token tree")), - }; - }; + let (macro_arg, undo_info) = db.macro_arg(id).value; let expander = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) 
=> expander, diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 37084ee8b93..6874336cd2d 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -31,7 +31,7 @@ impl DeclarativeMacroExpander { call_id: MacroCallId, ) -> ExpandResult { let loc = db.lookup_intern_macro_call(call_id); - let toolchain = &db.crate_graph()[loc.def.krate].toolchain; + let toolchain = db.toolchain(loc.def.krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -44,9 +44,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self .mac @@ -67,7 +67,7 @@ impl DeclarativeMacroExpander { krate: CrateId, call_site: Span, ) -> ExpandResult { - let toolchain = &db.crate_graph()[krate].toolchain; + let toolchain = db.toolchain(krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -80,9 +80,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), } @@ -119,7 +119,7 @@ impl DeclarativeMacroExpander { _ => None, } }; - let toolchain = crate_data.toolchain.as_ref(); + let toolchain = db.toolchain(def_crate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index fd028182faf..020ca75d80c 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -44,7 +44,6 @@ use crate::{ builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::{ExpandDatabase, TokenExpander}, - fixup::SyntaxFixupUndoInfo, hygiene::SyntaxContextData, mod_path::ModPath, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, @@ -129,8 +128,11 @@ pub type ExpandResult = ValueResult; #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum ExpandError { UnresolvedProcMacro(CrateId), + /// The macro expansion is disabled. 
+ MacroDisabled, + MacroDefinition, Mbe(mbe::ExpandError), - RecursionOverflowPoisoned, + RecursionOverflow, Other(Box>), ProcMacroPanic(Box>), } @@ -152,14 +154,14 @@ impl fmt::Display for ExpandError { match self { ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"), ExpandError::Mbe(it) => it.fmt(f), - ExpandError::RecursionOverflowPoisoned => { - f.write_str("overflow expanding the original macro") - } + ExpandError::RecursionOverflow => f.write_str("overflow expanding the original macro"), ExpandError::ProcMacroPanic(it) => { f.write_str("proc-macro panicked: ")?; f.write_str(it) } ExpandError::Other(it) => f.write_str(it), + ExpandError::MacroDisabled => f.write_str("macro disabled"), + ExpandError::MacroDefinition => f.write_str("macro definition has parse errors"), } } } @@ -225,8 +227,8 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing - // leakage problems here + // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index` + // but we need to fix the `cfg_attr` handling first. attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. /// @@ -758,15 +760,7 @@ impl ExpansionInfo { let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; - let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - ( - Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - SyntaxFixupUndoInfo::NONE, - ) - }); + let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value; let def = loc.def.ast_id().left().and_then(|id| { let def_tt = match id.to_node(db) { diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index b64c3549e42..136b0935be2 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -94,6 +94,21 @@ impl ModPath { } } + pub fn textual_len(&self) -> usize { + let base = match self.kind { + PathKind::Plain => 0, + PathKind::Super(0) => "self".len(), + PathKind::Super(i) => "super".len() * i as usize, + PathKind::Crate => "crate".len(), + PathKind::Abs => 0, + PathKind::DollarCrate(_) => "$crate".len(), + }; + self.segments() + .iter() + .map(|segment| segment.as_str().map_or(0, str::len)) + .fold(base, core::ops::Add::add) + } + pub fn is_ident(&self) -> bool { self.as_ident().is_some() } diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 70b47fc54b1..ca6fc0afe2d 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -12,7 +12,13 @@ use syntax::SmolStr; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(pub u32); +pub struct ProcMacroId(u32); + +impl ProcMacroId { + pub fn new(u32: u32) -> Self { + ProcMacroId(u32) + } +} #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ProcMacroKind { @@ -49,6 +55,7 @@ pub struct ProcMacro { pub name: SmolStr, pub kind: ProcMacroKind, pub expander: sync::Arc, + pub disabled: bool, } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] @@ -56,20 +63,35 @@ pub struct CustomProcMacroExpander { proc_macro_id: ProcMacroId, } -const DUMMY_ID: u32 = !0; - impl CustomProcMacroExpander { + const DUMMY_ID: u32 = !0; + const DISABLED_ID: u32 = !1; + pub fn new(proc_macro_id: ProcMacroId) -> Self { 
- assert_ne!(proc_macro_id.0, DUMMY_ID); + assert_ne!(proc_macro_id.0, Self::DUMMY_ID); + assert_ne!(proc_macro_id.0, Self::DISABLED_ID); Self { proc_macro_id } } - pub fn dummy() -> Self { - Self { proc_macro_id: ProcMacroId(DUMMY_ID) } + /// A dummy expander that always errors. This is used for proc-macros that are missing, usually + /// due to them not being built yet. + pub const fn dummy() -> Self { + Self { proc_macro_id: ProcMacroId(Self::DUMMY_ID) } } - pub fn is_dummy(&self) -> bool { - self.proc_macro_id.0 == DUMMY_ID + /// The macro was not yet resolved. + pub const fn is_dummy(&self) -> bool { + self.proc_macro_id.0 == Self::DUMMY_ID + } + + /// A dummy expander that always errors. This expander is used for macros that have been disabled. + pub const fn disabled() -> Self { + Self { proc_macro_id: ProcMacroId(Self::DISABLED_ID) } + } + + /// The macro is explicitly disabled and cannot be expanded. + pub const fn is_disabled(&self) -> bool { + self.proc_macro_id.0 == Self::DISABLED_ID } pub fn expand( @@ -84,10 +106,14 @@ impl CustomProcMacroExpander { mixed_site: Span, ) -> ExpandResult { match self.proc_macro_id { - ProcMacroId(DUMMY_ID) => ExpandResult::new( + ProcMacroId(Self::DUMMY_ID) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::UnresolvedProcMacro(def_crate), ), + ProcMacroId(Self::DISABLED_ID) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::MacroDisabled, + ), ProcMacroId(id) => { let proc_macros = db.proc_macros(); let proc_macros = match proc_macros.get(&def_crate) { @@ -110,7 +136,7 @@ impl CustomProcMacroExpander { ); return ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other("Internal error"), + ExpandError::other("Internal error: proc-macro index out of bounds"), ); } }; diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 7f8fb7f4b52..c4329a7b82b 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -169,9 +169,9 @@ impl ExprValidator { return; } - let pattern_arena = Arena::new(); - let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena); + let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); + let pattern_arena = Arena::new(); let mut m_arms = Vec::with_capacity(arms.len()); let mut has_lowering_errors = false; for arm in arms { @@ -196,8 +196,9 @@ impl ExprValidator { // If we had a NotUsefulMatchArm diagnostic, we could // check the usefulness of each pattern as we added it // to the matrix here. 
+ let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors); let m_arm = pat_analysis::MatchArm { - pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors), + pat: pattern_arena.alloc(pat), has_guard: arm.guard.is_some(), arm_data: (), }; @@ -223,7 +224,7 @@ impl ExprValidator { ValidityConstraint::ValidOnly, ) { Ok(report) => report, - Err(void) => match void {}, + Err(()) => return, }; // FIXME Report unreachable arms @@ -245,10 +246,10 @@ impl ExprValidator { db: &dyn HirDatabase, body: &Body, have_errors: &mut bool, - ) -> &'p DeconstructedPat<'p> { + ) -> DeconstructedPat<'p> { let mut patcx = match_check::PatCtxt::new(db, &self.infer, body); let pattern = patcx.lower_pattern(pat); - let pattern = cx.pattern_arena.alloc(cx.lower_pat(&pattern)); + let pattern = cx.lower_pat(&pattern); if !patcx.errors.is_empty() { *have_errors = true; } diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 712842372b6..e98a946a870 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -1,6 +1,7 @@ //! Interface with `rustc_pattern_analysis`. use std::fmt; +use tracing::debug; use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use rustc_hash::FxHashMap; @@ -11,7 +12,6 @@ use rustc_pattern_analysis::{ }; use smallvec::{smallvec, SmallVec}; use stdx::never; -use typed_arena::Arena; use crate::{ db::HirDatabase, @@ -26,7 +26,7 @@ use Constructor::*; // Re-export r-a-specific versions of all these types. pub(crate) type DeconstructedPat<'p> = - rustc_pattern_analysis::pat::DeconstructedPat<'p, MatchCheckCtx<'p>>; + rustc_pattern_analysis::pat::DeconstructedPat>; pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>; pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat>; @@ -40,7 +40,6 @@ pub(crate) struct MatchCheckCtx<'p> { module: ModuleId, body: DefWithBodyId, pub(crate) db: &'p dyn HirDatabase, - pub(crate) pattern_arena: &'p Arena>, exhaustive_patterns: bool, min_exhaustive_patterns: bool, } @@ -52,17 +51,12 @@ pub(crate) struct PatData<'p> { } impl<'p> MatchCheckCtx<'p> { - pub(crate) fn new( - module: ModuleId, - body: DefWithBodyId, - db: &'p dyn HirDatabase, - pattern_arena: &'p Arena>, - ) -> Self { + pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); let min_exhaustive_patterns = def_map.is_unstable_feature_enabled("min_exhaustive_patterns"); - Self { module, body, db, pattern_arena, exhaustive_patterns, min_exhaustive_patterns } + Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns } } fn is_uninhabited(&self, ty: &Ty) -> bool { @@ -131,15 +125,15 @@ impl<'p> MatchCheckCtx<'p> { } pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> { - let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat)); + let singleton = |pat| vec![pat]; let ctor; - let fields: &[_]; + let fields: Vec<_>; match pat.kind.as_ref() { PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat), PatKind::Binding { subpattern: None, .. 
} | PatKind::Wild => { ctor = Wildcard; - fields = &[]; + fields = Vec::new(); } PatKind::Deref { subpattern } => { ctor = match pat.ty.kind(Interner) { @@ -157,7 +151,7 @@ impl<'p> MatchCheckCtx<'p> { match pat.ty.kind(Interner) { TyKind::Tuple(_, substs) => { ctor = Struct; - let mut wilds: SmallVec<[_; 2]> = substs + let mut wilds: Vec<_> = substs .iter(Interner) .map(|arg| arg.assert_ty_ref(Interner).clone()) .map(DeconstructedPat::wildcard) @@ -166,7 +160,7 @@ impl<'p> MatchCheckCtx<'p> { let idx: u32 = pat.field.into_raw().into(); wilds[idx as usize] = self.lower_pat(&pat.pattern); } - fields = self.pattern_arena.alloc_extend(wilds) + fields = wilds } TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => { // The only legal patterns of type `Box` (outside `std`) are `_` and box @@ -216,33 +210,29 @@ impl<'p> MatchCheckCtx<'p> { field_id_to_id[field_idx as usize] = Some(i); ty }); - let mut wilds: SmallVec<[_; 2]> = - tys.map(DeconstructedPat::wildcard).collect(); + let mut wilds: Vec<_> = tys.map(DeconstructedPat::wildcard).collect(); for pat in subpatterns { let field_idx: u32 = pat.field.into_raw().into(); if let Some(i) = field_id_to_id[field_idx as usize] { wilds[i] = self.lower_pat(&pat.pattern); } } - fields = self.pattern_arena.alloc_extend(wilds); + fields = wilds; } _ => { never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty); ctor = Wildcard; - fields = &[]; + fields = Vec::new(); } } } &PatKind::LiteralBool { value } => { ctor = Bool(value); - fields = &[]; + fields = Vec::new(); } PatKind::Or { pats } => { ctor = Or; - // Collect here because `Arena::alloc_extend` panics on reentrancy. - let subpats: SmallVec<[_; 2]> = - pats.iter().map(|pat| self.lower_pat(pat)).collect(); - fields = self.pattern_arena.alloc_extend(subpats); + fields = pats.iter().map(|pat| self.lower_pat(pat)).collect(); } } let data = PatData { db: self.db }; @@ -307,7 +297,7 @@ impl<'p> MatchCheckCtx<'p> { } impl<'p> TypeCx for MatchCheckCtx<'p> { - type Error = Void; + type Error = (); type Ty = Ty; type VariantIdx = EnumVariantId; type StrLit = Void; @@ -463,7 +453,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { fn write_variant_name( f: &mut fmt::Formatter<'_>, - pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, + pat: &rustc_pattern_analysis::pat::DeconstructedPat, ) -> fmt::Result { let variant = pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt)); @@ -485,8 +475,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { Ok(()) } - fn bug(&self, fmt: fmt::Arguments<'_>) -> ! 
{ - panic!("{}", fmt) + fn bug(&self, fmt: fmt::Arguments<'_>) { + debug!("{}", fmt) } } diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 71c3f89716d..1977f00517c 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -26,7 +26,7 @@ use std::{convert::identity, ops::Index}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety, - Scalar, TyKind, TypeFlags, + Scalar, TyKind, TypeFlags, Variance, }; use either::Either; use hir_def::{ @@ -58,8 +58,9 @@ use crate::{ static_lifetime, to_assoc_type_id, traits::FnTrait, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, - AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment, - Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, + AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, + InEnvironment, Interner, Lifetime, ProjectionTy, RpitId, Substitution, TraitEnvironment, + TraitRef, Ty, TyBuilder, TyExt, }; // This lint has a false positive here. See the link below for details. @@ -68,7 +69,7 @@ use crate::{ #[allow(unreachable_pub)] pub use coerce::could_coerce; #[allow(unreachable_pub)] -pub use unify::could_unify; +pub use unify::{could_unify, could_unify_deeply}; use cast::CastCheck; pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; @@ -688,10 +689,17 @@ impl<'a> InferenceContext<'a> { for ty in type_of_for_iterator.values_mut() { *ty = table.resolve_completely(ty.clone()); } - for mismatch in type_mismatches.values_mut() { + type_mismatches.retain(|_, mismatch| { mismatch.expected = table.resolve_completely(mismatch.expected.clone()); mismatch.actual = table.resolve_completely(mismatch.actual.clone()); - } + chalk_ir::zip::Zip::zip_with( + &mut UnknownMismatch(self.db), + Variance::Invariant, + &mismatch.expected, + &mismatch.actual, + ) + .is_ok() + }); diagnostics.retain_mut(|diagnostic| { use InferenceDiagnostic::*; match diagnostic { @@ -1502,3 +1510,116 @@ impl std::ops::BitOrAssign for Diverges { *self = *self | other; } } +/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out +/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not helpful. +/// As the cause is usually an underlying name resolution problem. +struct UnknownMismatch<'db>(&'db dyn HirDatabase); +impl chalk_ir::zip::Zipper for UnknownMismatch<'_> { + fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> { + let zip_substs = |this: &mut Self, + variances, + sub_a: &Substitution, + sub_b: &Substitution| { + this.zip_substs(variance, variances, sub_a.as_slice(Interner), sub_b.as_slice(Interner)) + }; + match (a.kind(Interner), b.kind(Interner)) { + (TyKind::Adt(id_a, sub_a), TyKind::Adt(id_b, sub_b)) if id_a == id_b => zip_substs( + self, + Some(self.unification_database().adt_variance(*id_a)), + sub_a, + sub_b, + )?, + ( + TyKind::AssociatedType(assoc_ty_a, sub_a), + TyKind::AssociatedType(assoc_ty_b, sub_b), + ) if assoc_ty_a == assoc_ty_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Tuple(arity_a, sub_a), TyKind::Tuple(arity_b, sub_b)) + if arity_a == arity_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::OpaqueType(opaque_ty_a, sub_a), TyKind::OpaqueType(opaque_ty_b, sub_b)) + if opaque_ty_a == opaque_ty_b => + { + zip_substs(self, None, sub_a, sub_b)? 
+ } + (TyKind::Slice(ty_a), TyKind::Slice(ty_b)) => self.zip_tys(variance, ty_a, ty_b)?, + (TyKind::FnDef(fn_def_a, sub_a), TyKind::FnDef(fn_def_b, sub_b)) + if fn_def_a == fn_def_b => + { + zip_substs( + self, + Some(self.unification_database().fn_def_variance(*fn_def_a)), + sub_a, + sub_b, + )? + } + (TyKind::Ref(mutability_a, _, ty_a), TyKind::Ref(mutability_b, _, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Raw(mutability_a, ty_a), TyKind::Raw(mutability_b, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Array(ty_a, const_a), TyKind::Array(ty_b, const_b)) if const_a == const_b => { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Closure(id_a, sub_a), TyKind::Closure(id_b, sub_b)) if id_a == id_b => { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::Coroutine(coroutine_a, sub_a), TyKind::Coroutine(coroutine_b, sub_b)) + if coroutine_a == coroutine_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + ( + TyKind::CoroutineWitness(coroutine_a, sub_a), + TyKind::CoroutineWitness(coroutine_b, sub_b), + ) if coroutine_a == coroutine_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Function(fn_ptr_a), TyKind::Function(fn_ptr_b)) + if fn_ptr_a.sig == fn_ptr_b.sig && fn_ptr_a.num_binders == fn_ptr_b.num_binders => + { + zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)? + } + (TyKind::Error, TyKind::Error) => (), + (TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution), + _ => (), + } + + Ok(()) + } + + fn zip_lifetimes(&mut self, _: Variance, _: &Lifetime, _: &Lifetime) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_consts(&mut self, _: Variance, _: &Const, _: &Const) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_binders( + &mut self, + variance: Variance, + a: &Binders, + b: &Binders, + ) -> chalk_ir::Fallible<()> + where + T: Clone + + HasInterner + + chalk_ir::zip::Zip + + TypeFoldable, + { + chalk_ir::zip::Zip::zip_with(self, variance, a.skip_binders(), b.skip_binders()) + } + + fn interner(&self) -> Interner { + Interner + } + + fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase { + &self.0 + } +} diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index c3746f78706..22a70f951ea 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -485,6 +485,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.consume_expr(*expr); } + Statement::Item => (), } } if let Some(tail) = tail { @@ -531,6 +532,9 @@ impl InferenceContext<'_> { self.consume_expr(expr); } } + &Expr::Become { expr } => { + self.consume_expr(expr); + } Expr::RecordLit { fields, spread, .. 
} => { if let &Some(expr) = spread { self.consume_expr(expr); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 8b8e97b0081..428ed6748c6 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -502,6 +502,7 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } &Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr), + &Expr::Become { expr } => self.infer_expr_become(expr), Expr::Yield { expr } => { if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() { if let Some(expr) = expr { @@ -1084,6 +1085,27 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } + fn infer_expr_become(&mut self, expr: ExprId) -> Ty { + match &self.return_coercion { + Some(return_coercion) => { + let ret_ty = return_coercion.expected_ty(); + + let call_expr_ty = + self.infer_expr_inner(expr, &Expectation::HasType(ret_ty.clone())); + + // NB: this should *not* coerce. + // tail calls don't support any coercions except lifetimes ones (like `&'static u8 -> &'a u8`). + self.unify(&call_expr_ty, &ret_ty); + } + None => { + // FIXME: diagnose `become` outside of functions + self.infer_expr_no_expect(expr); + } + } + + self.result.standard_types.never.clone() + } + fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty { if let Some(box_id) = self.resolve_boxed_box() { let table = &mut self.table; @@ -1367,6 +1389,7 @@ impl InferenceContext<'_> { ); } } + Statement::Item => (), } } diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 663ea853231..00e5eac229f 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -65,6 +65,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.infer_mut_expr(*expr, Mutability::Not); } + Statement::Item => (), } } if let Some(tail) = tail { @@ -93,6 +94,9 @@ impl InferenceContext<'_> { self.infer_mut_expr(expr, Mutability::Not); } } + Expr::Become { expr } => { + self.infer_mut_expr(*expr, Mutability::Not); + } Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index de23ca34990..709760b64fd 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -74,6 +74,12 @@ impl> Canonicalized { } } +/// Check if types unify. +/// +/// Note that we consider placeholder types to unify with everything. +/// This means that there may be some unresolved goals that actually set bounds for the placeholder +/// type for the types to unify. For example `Option` and `Option` unify although there is +/// unresolved goal `T = U`. pub fn could_unify( db: &dyn HirDatabase, env: Arc, @@ -82,21 +88,35 @@ pub fn could_unify( unify(db, env, tys).is_some() } +/// Check if types unify eagerly making sure there are no unresolved goals. +/// +/// This means that placeholder types are not considered to unify if there are any bounds set on +/// them. 
For example `Option` and `Option` do not unify as we cannot show that `T = U` +pub fn could_unify_deeply( + db: &dyn HirDatabase, + env: Arc, + tys: &Canonical<(Ty, Ty)>, +) -> bool { + let mut table = InferenceTable::new(db, env); + let vars = make_substitutions(tys, &mut table); + let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); + let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); + let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars); + let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars); + table.resolve_obligations_as_possible(); + table.propagate_diverging_flag(); + let ty1_with_vars = table.resolve_completely(ty1_with_vars); + let ty2_with_vars = table.resolve_completely(ty2_with_vars); + table.unify_deeply(&ty1_with_vars, &ty2_with_vars) +} + pub(crate) fn unify( db: &dyn HirDatabase, env: Arc, tys: &Canonical<(Ty, Ty)>, ) -> Option { let mut table = InferenceTable::new(db, env); - let vars = Substitution::from_iter( - Interner, - tys.binders.iter(Interner).map(|it| match &it.kind { - chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner), - // FIXME: maybe wrong? - chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner), - chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), - }), - ); + let vars = make_substitutions(tys, &mut table); let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); if !table.unify(&ty1_with_vars, &ty2_with_vars) { @@ -125,6 +145,21 @@ pub(crate) fn unify( )) } +fn make_substitutions( + tys: &chalk_ir::Canonical<(chalk_ir::Ty, chalk_ir::Ty)>, + table: &mut InferenceTable<'_>, +) -> chalk_ir::Substitution { + Substitution::from_iter( + Interner, + tys.binders.iter(Interner).map(|it| match &it.kind { + chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner), + // FIXME: maybe wrong? + chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner), + chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), + }), + ) +} + bitflags::bitflags! { #[derive(Default, Clone, Copy)] pub(crate) struct TypeVariableFlags: u8 { @@ -431,6 +466,18 @@ impl<'a> InferenceTable<'a> { true } + /// Unify two relatable values (e.g. `Ty`) and check whether trait goals which arise from that could be fulfilled + pub(crate) fn unify_deeply>(&mut self, ty1: &T, ty2: &T) -> bool { + let result = match self.try_unify(ty1, ty2) { + Ok(r) => r, + Err(_) => return false, + }; + result.goals.iter().all(|goal| { + let canonicalized = self.canonicalize(goal.clone()); + self.try_resolve_obligation(&canonicalized).is_some() + }) + } + /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the /// caller needs to deal with them. 
pub(crate) fn try_unify>( @@ -501,7 +548,8 @@ impl<'a> InferenceTable<'a> { fn register_obligation_in_env(&mut self, goal: InEnvironment) { let canonicalized = self.canonicalize(goal); - if !self.try_resolve_obligation(&canonicalized) { + let solution = self.try_resolve_obligation(&canonicalized); + if matches!(solution, Some(Solution::Ambig(_))) { self.pending_obligations.push(canonicalized); } } @@ -627,38 +675,35 @@ impl<'a> InferenceTable<'a> { fn try_resolve_obligation( &mut self, canonicalized: &Canonicalized>, - ) -> bool { + ) -> Option> { let solution = self.db.trait_solve( self.trait_env.krate, self.trait_env.block, canonicalized.value.clone(), ); - match solution { + match &solution { Some(Solution::Unique(canonical_subst)) => { canonicalized.apply_solution( self, Canonical { - binders: canonical_subst.binders, + binders: canonical_subst.binders.clone(), // FIXME: handle constraints - value: canonical_subst.value.subst, + value: canonical_subst.value.subst.clone(), }, ); - true } Some(Solution::Ambig(Guidance::Definite(substs))) => { - canonicalized.apply_solution(self, substs); - false + canonicalized.apply_solution(self, substs.clone()); } Some(_) => { // FIXME use this when trying to resolve everything at the end - false } None => { // FIXME obligation cannot be fulfilled => diagnostic - true } } + solution } pub(crate) fn callable_sig( diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs index 5bfe7bf010f..9b1424548c2 100644 --- a/crates/hir-ty/src/layout/target.rs +++ b/crates/hir-ty/src/layout/target.rs @@ -11,10 +11,8 @@ pub fn target_data_layout_query( db: &dyn HirDatabase, krate: CrateId, ) -> Result, Arc> { - let crate_graph = db.crate_graph(); - let res = crate_graph[krate].target_layout.as_deref(); - match res { - Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) { + match db.data_layout(krate) { + Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) { Ok(it) => Ok(Arc::new(it)), Err(e) => { Err(match e { @@ -44,6 +42,6 @@ pub fn target_data_layout_query( }.into()) } }, - Err(e) => Err(Arc::from(&**e)), + Err(e) => Err(e), } } diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index ba3dfe8100d..6c1eccb75e6 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -1,6 +1,7 @@ use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; +use project_model::target_data_layout::RustcDataLayoutConfig; use rustc_hash::FxHashMap; use test_fixture::WithFixture; use triomphe::Arc; @@ -15,13 +16,18 @@ use crate::{ mod closure; fn current_machine_data_layout() -> String { - project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap() + project_model::target_data_layout::get( + RustcDataLayoutConfig::Rustc(None), + None, + &FxHashMap::default(), + ) + .unwrap() } fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = current_machine_data_layout(); let ra_fixture = format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}", ); let (db, file_ids) = TestDB::with_many_files(&ra_fixture); @@ -70,7 +76,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutErro fn eval_expr(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = current_machine_data_layout(); let ra_fixture = 
format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}", ); let (db, file_id) = TestDB::with_single_file(&ra_fixture); diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 70138633341..ec97bdc2c43 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -79,8 +79,8 @@ pub use builder::{ParamKind, TyBuilder}; pub use chalk_ext::*; pub use infer::{ closure::{CaptureKind, CapturedItem}, - could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, - InferenceResult, OverloadedDeref, PointerCast, + could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode, + InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast, }; pub use interner::Interner; pub use lower::{ diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index 9089c11c5d9..63fa87ad662 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -7,6 +7,7 @@ use std::iter; use hir_def::{DefWithBodyId, HasModule}; use la_arena::ArenaMap; +use rustc_hash::FxHashMap; use stdx::never; use triomphe::Arc; @@ -14,7 +15,7 @@ use crate::{ db::{HirDatabase, InternedClosure}, mir::Operand, utils::ClosureSubst, - ClosureId, Interner, Ty, TyExt, TypeFlags, + ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags, }; use super::{ @@ -36,11 +37,27 @@ pub struct MovedOutOfRef { pub span: MirSpan, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PartiallyMoved { + pub ty: Ty, + pub span: MirSpan, + pub local: LocalId, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BorrowRegion { + pub local: LocalId, + pub kind: BorrowKind, + pub places: Vec, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct BorrowckResult { pub mir_body: Arc, pub mutability_of_locals: ArenaMap, pub moved_out_of_ref: Vec, + pub partially_moved: Vec, + pub borrow_regions: Vec, } fn all_mir_bodies( @@ -80,12 +97,26 @@ pub fn borrowck_query( res.push(BorrowckResult { mutability_of_locals: mutability_of_locals(db, &body), moved_out_of_ref: moved_out_of_ref(db, &body), + partially_moved: partially_moved(db, &body), + borrow_regions: borrow_regions(db, &body), mir_body: body, }); })?; Ok(res.into()) } +fn make_fetch_closure_field( + db: &dyn HirDatabase, +) -> impl FnOnce(ClosureId, &Substitution, usize) -> Ty + '_ { + |c: ClosureId, subst: &Substitution, f: usize| { + let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); + let infer = db.infer(def); + let (captures, _) = infer.closure_info(&c); + let parent_subst = ClosureSubst(subst).parent_subst(); + captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst) + } +} + fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut result = vec![]; let mut for_operand = |op: &Operand, span: MirSpan| match op { @@ -99,18 +130,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec ty = proj.projected_ty( ty, db, - |c, subst, f| { - let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); - let infer = db.infer(def); - let (captures, _) = infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); - captures - .get(f) - .expect("broken closure field") - .ty - .clone() - .substitute(Interner, parent_subst) - }, + make_fetch_closure_field(db), 
body.owner.module(db.upcast()).krate(), ); } @@ -188,6 +208,132 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec result } +fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec { + let mut result = vec![]; + let mut for_operand = |op: &Operand, span: MirSpan| match op { + Operand::Copy(p) | Operand::Move(p) => { + let mut ty: Ty = body.locals[p.local].ty.clone(); + for proj in p.projection.lookup(&body.projection_store) { + ty = proj.projected_ty( + ty, + db, + make_fetch_closure_field(db), + body.owner.module(db.upcast()).krate(), + ); + } + if !ty.clone().is_copy(db, body.owner) + && !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR) + { + result.push(PartiallyMoved { span, ty, local: p.local }); + } + } + Operand::Constant(_) | Operand::Static(_) => (), + }; + for (_, block) in body.basic_blocks.iter() { + db.unwind_if_cancelled(); + for statement in &block.statements { + match &statement.kind { + StatementKind::Assign(_, r) => match r { + Rvalue::ShallowInitBoxWithAlloc(_) => (), + Rvalue::ShallowInitBox(o, _) + | Rvalue::UnaryOp(_, o) + | Rvalue::Cast(_, o, _) + | Rvalue::Repeat(o, _) + | Rvalue::Use(o) => for_operand(o, statement.span), + Rvalue::CopyForDeref(_) + | Rvalue::Discriminant(_) + | Rvalue::Len(_) + | Rvalue::Ref(_, _) => (), + Rvalue::CheckedBinaryOp(_, o1, o2) => { + for_operand(o1, statement.span); + for_operand(o2, statement.span); + } + Rvalue::Aggregate(_, ops) => { + for op in ops.iter() { + for_operand(op, statement.span); + } + } + }, + StatementKind::FakeRead(_) + | StatementKind::Deinit(_) + | StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + | StatementKind::Nop => (), + } + } + match &block.terminator { + Some(terminator) => match &terminator.kind { + TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span), + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Goto { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::CoroutineDrop + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Drop { .. } => (), + TerminatorKind::DropAndReplace { value, .. } => { + for_operand(value, terminator.span); + } + TerminatorKind::Call { func, args, .. } => { + for_operand(func, terminator.span); + args.iter().for_each(|it| for_operand(it, terminator.span)); + } + TerminatorKind::Assert { cond, .. } => { + for_operand(cond, terminator.span); + } + TerminatorKind::Yield { value, .. } => { + for_operand(value, terminator.span); + } + }, + None => (), + } + } + result.shrink_to_fit(); + result +} + +fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec { + let mut borrows = FxHashMap::default(); + for (_, block) in body.basic_blocks.iter() { + db.unwind_if_cancelled(); + for statement in &block.statements { + if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind { + borrows + .entry(p.local) + .and_modify(|it: &mut BorrowRegion| { + it.places.push(statement.span); + }) + .or_insert_with(|| BorrowRegion { + local: p.local, + kind: *kind, + places: vec![statement.span], + }); + } + } + match &block.terminator { + Some(terminator) => match &terminator.kind { + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Goto { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::CoroutineDrop + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Drop { .. } => (), + TerminatorKind::DropAndReplace { .. 
} => {} + TerminatorKind::Call { .. } => {} + _ => (), + }, + None => (), + } + } + + borrows.into_values().collect() +} + #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum ProjectionCase { /// Projection is a local @@ -217,18 +363,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio ty = proj.projected_ty( ty, db, - |c, subst, f| { - let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); - let infer = db.infer(def); - let (captures, _) = infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); - captures - .get(f) - .expect("broken closure field") - .ty - .clone() - .substitute(Interner, parent_subst) - }, + make_fetch_closure_field(db), body.owner.module(db.upcast()).krate(), ); } diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 1572a6d497c..b038900cdac 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -775,6 +775,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.set_terminator(current, TerminatorKind::Return, expr_id.into()); Ok(None) } + Expr::Become { .. } => not_supported!("tail-calls"), Expr::Yield { .. } => not_supported!("yield"), Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { let spread_place = match spread { @@ -1246,7 +1247,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_assignment(current, place, op.into(), expr_id.into()); Ok(Some(current)) } - Expr::Underscore => not_supported!("underscore"), + Expr::Underscore => Ok(Some(current)), } } @@ -1780,6 +1781,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_fake_read(c, p, expr.into()); current = scope2.pop_and_drop(self, c, expr.into()); } + hir_def::hir::Statement::Item => (), } } if let Some(tail) = tail { diff --git a/crates/hir-ty/src/tests/diagnostics.rs b/crates/hir-ty/src/tests/diagnostics.rs index 1876be303ad..80f92eaf435 100644 --- a/crates/hir-ty/src/tests/diagnostics.rs +++ b/crates/hir-ty/src/tests/diagnostics.rs @@ -1,3 +1,5 @@ +use crate::tests::check_no_mismatches; + use super::check; #[test] @@ -94,3 +96,43 @@ fn test(x: bool) { "#, ); } + +#[test] +fn no_mismatches_on_atpit() { + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait WrappedAssoc { + type Assoc; + fn do_thing(&self) -> Option; +} + +struct Foo; +impl WrappedAssoc for Foo { + type Assoc = impl Sized; + + fn do_thing(&self) -> Option { + Some(()) + } +} +"#, + ); + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait Trait { + type Assoc; + const DEFINE: Option; +} + +impl Trait for () { + type Assoc = impl Sized; + const DEFINE: Option = Option::Some(()); +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 84747822826..6c7dbe1db6f 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -3376,11 +3376,8 @@ fn main() { [x,] = &[1,]; //^^^^expected &[i32; 1], got [{unknown}; _] - // FIXME we only want the outermost error, but this matches the current - // behavior of slice patterns let x; [(x,),] = &[(1,),]; - // ^^^^expected {unknown}, got ({unknown},) //^^^^^^^expected &[(i32,); 1], got [{unknown}; _] let x; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 32abbc80c6a..08f7bb14caa 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -31,6 +31,7 @@ mod has_source; pub mod db; pub mod diagnostics; pub mod symbols; +pub mod term_search; mod display; @@ -1084,6 +1085,27 @@ impl Field { 
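For context on what the new `partially_moved` and `borrow_regions` passes in `crates/hir-ty/src/mir/borrowck.rs` above report, here is a minimal, self-contained sketch of user code (written for illustration, not taken from this patch): moving a single field out of a struct leaves the local partially moved, while taking a reference records a borrow region for that local together with its `BorrowKind` and span.

struct Pair {
    a: String,
    b: String,
}

fn demo(p: Pair) -> String {
    // Moving only `p.a` leaves `p` partially moved; `partially_moved` records
    // the local for `p` together with the span of this statement.
    let a = p.a;
    // Borrowing `p.b` is a shared borrow through the local for `p`;
    // `borrow_regions` groups such borrows per local with their kind and spans.
    let b_ref = &p.b;
    format!("{a} {b_ref}")
}

fn main() {
    println!("{}", demo(Pair { a: "hello".into(), b: "world".into() }));
}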
Type::new(db, var_id, ty) } + // FIXME: Find better API to also handle const generics + pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator) -> Type { + let var_id = self.parent.into(); + let def_id: AdtId = match self.parent { + VariantDef::Struct(it) => it.id.into(), + VariantDef::Union(it) => it.id.into(), + VariantDef::Variant(it) => it.parent_enum(db).id.into(), + }; + let mut generics = generics.map(|it| it.ty.clone()); + let substs = TyBuilder::subst_for_def(db, def_id, None) + .fill(|x| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }) + .build(); + let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); + Type::new(db, var_id, ty) + } + pub fn layout(&self, db: &dyn HirDatabase) -> Result { db.layout_of_ty( self.ty(db).ty, @@ -1152,6 +1174,10 @@ impl Struct { fn variant_data(self, db: &dyn HirDatabase) -> Arc { db.struct_data(self.id).variant_data.clone() } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Struct { @@ -1194,6 +1220,10 @@ impl Union { fn variant_data(self, db: &dyn HirDatabase) -> Arc { db.union_data(self.id).variant_data.clone() } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Union { @@ -1269,6 +1299,10 @@ impl Enum { pub fn layout(self, db: &dyn HirDatabase) -> Result { Adt::from(self).layout(db) } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Enum { @@ -1344,6 +1378,10 @@ impl Variant { _ => parent_layout, }) } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } /// Variants inherit visibility from the parent enum. @@ -1394,9 +1432,9 @@ impl Adt { /// Turns this ADT into a type with the given type parameters. This isn't /// the greatest API, FIXME find a better one. 
- pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type { + pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator) -> Type { let id = AdtId::from(self); - let mut it = args.iter().map(|t| t.ty.clone()); + let mut it = args.map(|t| t.ty.clone()); let ty = TyBuilder::def_ty(db, id.into(), None) .fill(|x| { let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); @@ -1789,6 +1827,35 @@ impl Function { Type::new_with_resolver_inner(db, &resolver, ty) } + // FIXME: Find better API to also handle const generics + pub fn ret_type_with_args( + self, + db: &dyn HirDatabase, + generics: impl Iterator, + ) -> Type { + let resolver = self.id.resolver(db.upcast()); + let parent_id: Option = match self.id.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => Some(it.into()), + ItemContainerId::TraitId(it) => Some(it.into()), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, + }; + let mut generics = generics.map(|it| it.ty.clone()); + let mut filler = |x: &_| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }; + + let parent_substs = + parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(&mut filler).build()); + let substs = TyBuilder::subst_for_def(db, self.id, parent_substs).fill(&mut filler).build(); + + let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); + let ty = callable_sig.ret().clone(); + Type::new_with_resolver_inner(db, &resolver, ty) + } + pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option { if !self.is_async(db) { return None; @@ -1855,6 +1922,51 @@ impl Function { .collect() } + // FIXME: Find better API to also handle const generics + pub fn params_without_self_with_args( + self, + db: &dyn HirDatabase, + generics: impl Iterator, + ) -> Vec { + let environment = db.trait_environment(self.id.into()); + let parent_id: Option = match self.id.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => Some(it.into()), + ItemContainerId::TraitId(it) => Some(it.into()), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, + }; + let mut generics = generics.map(|it| it.ty.clone()); + let parent_substs = parent_id.map(|id| { + TyBuilder::subst_for_def(db, id, None) + .fill(|x| match x { + ParamKind::Type => generics + .next() + .unwrap_or_else(|| TyKind::Error.intern(Interner)) + .cast(Interner), + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }) + .build() + }); + + let substs = TyBuilder::subst_for_def(db, self.id, parent_substs) + .fill(|_| { + let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); + GenericArg::new(Interner, GenericArgData::Ty(ty)) + }) + .build(); + let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); + let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 }; + callable_sig + .params() + .iter() + .enumerate() + .skip(skip) + .map(|(idx, ty)| { + let ty = Type { env: environment.clone(), ty: ty.clone() }; + Param { func: self, ty, idx } + }) + .collect() + } + pub fn is_const(self, db: &dyn HirDatabase) -> bool { db.function_data(self.id).has_const_kw() } @@ -1889,6 +2001,11 @@ impl Function { db.function_data(self.id).attrs.is_bench() } + /// Is this function marked as unstable with `#[feature]` attribute? 
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).attrs.is_unstable() + } + pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool { hir_ty::is_fn_unsafe_to_call(db, self.id) } @@ -2052,6 +2169,34 @@ impl SelfParam { let ty = callable_sig.params()[0].clone(); Type { env: environment, ty } } + + // FIXME: Find better API to also handle const generics + pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator) -> Type { + let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => it.into(), + ItemContainerId::TraitId(it) => it.into(), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => { + panic!("Never get here") + } + }; + + let mut generics = generics.map(|it| it.ty.clone()); + let mut filler = |x: &_| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }; + + let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build(); + let substs = + TyBuilder::subst_for_def(db, self.func, Some(parent_substs)).fill(&mut filler).build(); + let callable_sig = + db.callable_item_signature(self.func.into()).substitute(Interner, &substs); + let environment = db.trait_environment(self.func.into()); + let ty = callable_sig.params()[0].clone(); + Type { env: environment, ty } + } } impl HasVisibility for Function { @@ -2754,7 +2899,7 @@ impl GenericDef { .collect() } - pub fn type_params(self, db: &dyn HirDatabase) -> Vec { + pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec { let generics = db.generic_params(self.into()); generics .type_or_consts @@ -3126,12 +3271,16 @@ impl TypeParam { let ty = generic_arg_from_param(db, self.id.into())?; let resolver = self.id.parent().resolver(db.upcast()); match ty.data(Interner) { - GenericArgData::Ty(it) => { + GenericArgData::Ty(it) if *it.kind(Interner) != TyKind::Error => { Some(Type::new_with_resolver_inner(db, &resolver, it.clone())) } _ => None, } } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(GenericParamId::from(self.id).into()).is_unstable() + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] @@ -3241,6 +3390,26 @@ impl TypeOrConstParam { Either::Right(it) => it.ty(db), } } + + pub fn as_type_param(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_params(self.id.parent); + match ¶ms.type_or_consts[self.id.local_id] { + hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { + Some(TypeParam { id: TypeParamId::from_unchecked(self.id) }) + } + hir_def::generics::TypeOrConstParamData::ConstParamData(_) => None, + } + } + + pub fn as_const_param(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_params(self.id.parent); + match ¶ms.type_or_consts[self.id.local_id] { + hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None, + hir_def::generics::TypeOrConstParamData::ConstParamData(_) => { + Some(ConstParam { id: ConstParamId::from_unchecked(self.id) }) + } + } + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -3285,12 +3454,11 @@ impl Impl { .filter(filter), ) }); + for id in def_crates .iter() .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db)) .map(|Crate { id }| id) - .chain(def_crates.iter().copied()) - .unique() { all.extend( db.trait_impls_in_crate(id) @@ -3520,7 +3688,7 @@ pub enum CaptureKind { Move, } -#[derive(Clone, PartialEq, Eq, Debug)] 
+#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct Type { env: Arc, ty: Ty, @@ -3620,6 +3788,50 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Ref(..)) } + pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool { + return go(db, self.env.krate, &self.ty); + + fn go(db: &dyn HirDatabase, krate: CrateId, ty: &Ty) -> bool { + match ty.kind(Interner) { + // Reference itself + TyKind::Ref(_, _, _) => true, + + // For non-phantom_data adts we check variants/fields as well as generic parameters + TyKind::Adt(adt_id, substitution) + if !db.struct_datum(krate, *adt_id).flags.phantom_data => + { + let adt_datum = &db.struct_datum(krate, *adt_id); + let adt_datum_bound = + adt_datum.binders.clone().substitute(Interner, substitution); + adt_datum_bound + .variants + .into_iter() + .flat_map(|variant| variant.fields.into_iter()) + .any(|ty| go(db, krate, &ty)) + || substitution + .iter(Interner) + .filter_map(|x| x.ty(Interner)) + .any(|ty| go(db, krate, ty)) + } + // And for `PhantomData`, we check `T`. + TyKind::Adt(_, substitution) + | TyKind::Tuple(_, substitution) + | TyKind::OpaqueType(_, substitution) + | TyKind::AssociatedType(_, substitution) + | TyKind::FnDef(_, substitution) => substitution + .iter(Interner) + .filter_map(|x| x.ty(Interner)) + .any(|ty| go(db, krate, ty)), + + // For `[T]` or `*T` we check `T` + TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, krate, ty), + + // Consider everything else as not reference + _ => false, + } + } + } + pub fn as_reference(&self) -> Option<(Type, Mutability)> { let (ty, _lt, m) = self.ty.as_reference()?; let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut)); @@ -3727,14 +3939,16 @@ impl Type { ) } + // FIXME: Find better API that also handles const generics pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool { let mut it = args.iter().map(|t| t.ty.clone()); let trait_ref = TyBuilder::trait_ref(db, trait_.id) .push(self.ty.clone()) .fill(|x| { - let r = it.next().unwrap(); match x { - ParamKind::Type => r.cast(Interner), + ParamKind::Type => { + it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } ParamKind::Const(ty) => { // FIXME: this code is not covered in tests. unknown_const_as_generic(ty.clone()) @@ -4368,12 +4582,24 @@ impl Type { walk_type(db, self, &mut cb); } - + /// Check if type unifies with another type. + /// + /// Note that we consider placeholder types to unify with everything. + /// For example `Option` and `Option` unify although there is unresolved goal `T = U`. pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool { let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone())); hir_ty::could_unify(db, self.env.clone(), &tys) } + /// Check if type unifies with another type eagerly making sure there are no unresolved goals. + /// + /// This means that placeholder types are not considered to unify if there are any bounds set on + /// them. 
For example `Option` and `Option` do not unify as we cannot show that `T = U` + pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool { + let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone())); + hir_ty::could_unify_deeply(db, self.env.clone(), &tys) + } + pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool { let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone())); hir_ty::could_coerce(db, self.env.clone(), &tys) diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs new file mode 100644 index 00000000000..72762007dc9 --- /dev/null +++ b/crates/hir/src/term_search.rs @@ -0,0 +1,298 @@ +//! Term search + +use hir_def::type_ref::Mutability; +use hir_ty::db::HirDatabase; +use itertools::Itertools; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type}; + +mod expr; +pub use expr::Expr; + +mod tactics; + +/// Key for lookup table to query new types reached. +#[derive(Debug, Hash, PartialEq, Eq)] +enum NewTypesKey { + ImplMethod, + StructProjection, +} + +/// Helper enum to squash big number of alternative trees into `Many` variant as there is too many +/// to take into account. +#[derive(Debug)] +enum AlternativeExprs { + /// There are few trees, so we keep track of them all + Few(FxHashSet), + /// There are too many trees to keep track of + Many, +} + +impl AlternativeExprs { + /// Construct alternative trees + /// + /// # Arguments + /// `threshold` - threshold value for many trees (more than that is many) + /// `exprs` - expressions iterator + fn new(threshold: usize, exprs: impl Iterator) -> AlternativeExprs { + let mut it = AlternativeExprs::Few(Default::default()); + it.extend_with_threshold(threshold, exprs); + it + } + + /// Get type trees stored in alternative trees (or `Expr::Many` in case of many) + /// + /// # Arguments + /// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`) + fn exprs(&self, ty: &Type) -> Vec { + match self { + AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(), + AlternativeExprs::Many => vec![Expr::Many(ty.clone())], + } + } + + /// Extend alternative expressions + /// + /// # Arguments + /// `threshold` - threshold value for many trees (more than that is many) + /// `exprs` - expressions iterator + fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator) { + match self { + AlternativeExprs::Few(tts) => { + for it in exprs { + if tts.len() > threshold { + *self = AlternativeExprs::Many; + break; + } + + tts.insert(it); + } + } + AlternativeExprs::Many => (), + } + } +} + +/// # Lookup table for term search +/// +/// Lookup table keeps all the state during term search. +/// This means it knows what types and how are reachable. +/// +/// The secondary functionality for lookup table is to keep track of new types reached since last +/// iteration as well as keeping track of which `ScopeDef` items have been used. +/// Both of them are to speed up the term search by leaving out types / ScopeDefs that likely do +/// not produce any new results. 
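To make the squashing behaviour described for `AlternativeExprs` above concrete, here is a small, self-contained sketch of the same idea with illustrative names (this is not the crate's API): concrete alternatives are tracked individually only while their number stays within the threshold, after which the whole set collapses into a single `Many` marker.

use std::collections::HashSet;

#[derive(Debug)]
enum Alternatives {
    Few(HashSet<String>),
    Many,
}

impl Alternatives {
    fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = String>) {
        match self {
            Alternatives::Few(set) => {
                for expr in exprs {
                    // Once more than `threshold` alternatives are stored, stop
                    // tracking them individually and collapse to `Many`.
                    if set.len() > threshold {
                        *self = Alternatives::Many;
                        break;
                    }
                    set.insert(expr);
                }
            }
            Alternatives::Many => (),
        }
    }
}

fn main() {
    let mut alts = Alternatives::Few(HashSet::new());
    let exprs = ["a".to_string(), "b".to_string(), "c".to_string()];
    alts.extend_with_threshold(1, exprs.into_iter());
    println!("{alts:?}"); // prints `Many`: the third alternative exceeded the threshold
}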
+#[derive(Default, Debug)] +struct LookupTable { + /// All the `Expr`s in "value" produce the type of "key" + data: FxHashMap, + /// New types reached since last query by the `NewTypesKey` + new_types: FxHashMap>, + /// ScopeDefs that are not interesting any more + exhausted_scopedefs: FxHashSet, + /// ScopeDefs that were used in current round + round_scopedef_hits: FxHashSet, + /// Amount of rounds since scopedef was first used. + rounds_since_sopedef_hit: FxHashMap, + /// Types queried but not present + types_wishlist: FxHashSet, + /// Threshold to squash trees to `Many` + many_threshold: usize, +} + +impl LookupTable { + /// Initialize lookup table + fn new(many_threshold: usize) -> Self { + let mut res = Self { many_threshold, ..Default::default() }; + res.new_types.insert(NewTypesKey::ImplMethod, Vec::new()); + res.new_types.insert(NewTypesKey::StructProjection, Vec::new()); + res + } + + /// Find all `Expr`s that unify with the `ty` + fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option> { + self.data + .iter() + .find(|(t, _)| t.could_unify_with_deeply(db, ty)) + .map(|(t, tts)| tts.exprs(t)) + } + + /// Same as find but automatically creates shared reference of types in the lookup + /// + /// For example if we have type `i32` in data and we query for `&i32` it map all the type + /// trees we have for `i32` with `Expr::Reference` and returns them. + fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option> { + self.data + .iter() + .find(|(t, _)| t.could_unify_with_deeply(db, ty)) + .map(|(t, it)| it.exprs(t)) + .or_else(|| { + self.data + .iter() + .find(|(t, _)| { + Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, ty) + }) + .map(|(t, it)| { + it.exprs(t) + .into_iter() + .map(|expr| Expr::Reference(Box::new(expr))) + .collect() + }) + }) + } + + /// Insert new type trees for type + /// + /// Note that the types have to be the same, unification is not enough as unification is not + /// transitive. For example Vec and FxHashSet both unify with Iterator, + /// but they clearly do not unify themselves. + fn insert(&mut self, ty: Type, exprs: impl Iterator) { + match self.data.get_mut(&ty) { + Some(it) => it.extend_with_threshold(self.many_threshold, exprs), + None => { + self.data.insert(ty.clone(), AlternativeExprs::new(self.many_threshold, exprs)); + for it in self.new_types.values_mut() { + it.push(ty.clone()); + } + } + } + } + + /// Iterate all the reachable types + fn iter_types(&self) -> impl Iterator + '_ { + self.data.keys().cloned() + } + + /// Query new types reached since last query by key + /// + /// Create new key if you wish to query it to avoid conflicting with existing queries. + fn new_types(&mut self, key: NewTypesKey) -> Vec { + match self.new_types.get_mut(&key) { + Some(it) => std::mem::take(it), + None => Vec::new(), + } + } + + /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more + fn mark_exhausted(&mut self, def: ScopeDef) { + self.exhausted_scopedefs.insert(def); + } + + /// Mark `ScopeDef` as used meaning we managed to produce something useful from it + fn mark_fulfilled(&mut self, def: ScopeDef) { + self.round_scopedef_hits.insert(def); + } + + /// Start new round (meant to be called at the beginning of iteration in `term_search`) + /// + /// This functions marks some `ScopeDef`s as exhausted if there have been + /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`. 
+ fn new_round(&mut self) { + for def in &self.round_scopedef_hits { + let hits = + self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0); + const MAX_ROUNDS_AFTER_HIT: u32 = 2; + if *hits > MAX_ROUNDS_AFTER_HIT { + self.exhausted_scopedefs.insert(*def); + } + } + self.round_scopedef_hits.clear(); + } + + /// Get exhausted `ScopeDef`s + fn exhausted_scopedefs(&self) -> &FxHashSet { + &self.exhausted_scopedefs + } + + /// Types queried but not found + fn take_types_wishlist(&mut self) -> FxHashSet { + std::mem::take(&mut self.types_wishlist) + } +} + +/// Context for the `term_search` function +#[derive(Debug)] +pub struct TermSearchCtx<'a, DB: HirDatabase> { + /// Semantics for the program + pub sema: &'a Semantics<'a, DB>, + /// Semantic scope, captures context for the term search + pub scope: &'a SemanticsScope<'a>, + /// Target / expected output type + pub goal: Type, + /// Configuration for term search + pub config: TermSearchConfig, +} + +/// Configuration options for the term search +#[derive(Debug, Clone, Copy)] +pub struct TermSearchConfig { + /// Enable borrow checking, this guarantees the outputs of the `term_search` to borrow-check + pub enable_borrowcheck: bool, + /// Indicate when to squash multiple trees to `Many` as there are too many to keep track + pub many_alternatives_threshold: usize, + /// Depth of the search eg. number of cycles to run + pub depth: usize, +} + +impl Default for TermSearchConfig { + fn default() -> Self { + Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 6 } + } +} + +/// # Term search +/// +/// Search for terms (expressions) that unify with the `goal` type. +/// +/// # Arguments +/// * `ctx` - Context for term search +/// +/// Internally this function uses Breadth First Search to find path to `goal` type. +/// The general idea is following: +/// 1. Populate lookup (frontier for BFS) from values (local variables, statics, constants, etc) +/// as well as from well knows values (such as `true/false` and `()`) +/// 2. Iteratively expand the frontier (or contents of the lookup) by trying different type +/// transformation tactics. For example functions take as from set of types (arguments) to some +/// type (return type). Other transformations include methods on type, type constructors and +/// projections to struct fields (field access). +/// 3. Once we manage to find path to type we are interested in we continue for single round to see +/// if we can find more paths that take us to the `goal` type. +/// 4. Return all the paths (type trees) that take us to the `goal` type. +/// +/// Note that there are usually more ways we can get to the `goal` type but some are discarded to +/// reduce the memory consumption. It is also unlikely anyone is willing ti browse through +/// thousands of possible responses so we currently take first 10 from every tactic. 
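Since the `term_search` entry point follows below, a hedged usage sketch may help tie the pieces together. This is a hypothetical call site, assuming the `hir` crate as extended by this patch; the helper name and the `hir::db::HirDatabase` path are illustrative, not part of the change.

use hir::term_search::{term_search, Expr, TermSearchConfig, TermSearchCtx};
use hir::{Semantics, SemanticsScope, Type};

// Hypothetical helper, not part of this patch: run term search for `goal`
// in the given scope and return the candidate expression trees.
fn candidates_for<'db, DB: hir::db::HirDatabase>(
    sema: &'db Semantics<'db, DB>,
    scope: &'db SemanticsScope<'db>,
    goal: Type,
) -> Vec<Expr> {
    let ctx = TermSearchCtx { sema, scope, goal, config: TermSearchConfig::default() };
    term_search(&ctx)
}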
+pub fn term_search(ctx: &TermSearchCtx<'_, DB>) -> Vec { + let module = ctx.scope.module(); + let mut defs = FxHashSet::default(); + defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module))); + + ctx.scope.process_all_names(&mut |_, def| { + defs.insert(def); + }); + + let mut lookup = LookupTable::new(ctx.config.many_alternatives_threshold); + + // Try trivial tactic first, also populates lookup table + let mut solutions: Vec = tactics::trivial(ctx, &defs, &mut lookup).collect(); + // Use well known types tactic before iterations as it does not depend on other tactics + solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup)); + + for _ in 0..ctx.config.depth { + lookup.new_round(); + + solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup)); + solutions.extend(tactics::free_function(ctx, &defs, &mut lookup)); + solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup)); + solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup)); + solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup)); + + // Discard not interesting `ScopeDef`s for speedup + for def in lookup.exhausted_scopedefs() { + defs.remove(def); + } + } + + solutions.into_iter().filter(|it| !it.is_many()).unique().collect() +} diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs new file mode 100644 index 00000000000..254fbe7e2b5 --- /dev/null +++ b/crates/hir/src/term_search/expr.rs @@ -0,0 +1,468 @@ +//! Type tree for term search + +use hir_def::find_path::PrefixKind; +use hir_expand::mod_path::ModPath; +use hir_ty::{ + db::HirDatabase, + display::{DisplaySourceCodeError, HirDisplay}, +}; +use itertools::Itertools; + +use crate::{ + Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef, + SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, +}; + +/// Helper function to get path to `ModuleDef` +fn mod_item_path( + sema_scope: &SemanticsScope<'_>, + def: &ModuleDef, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Option { + let db = sema_scope.db; + // Account for locals shadowing items from module + let name_hit_count = def.name(db).map(|def_name| { + let mut name_hit_count = 0; + sema_scope.process_all_names(&mut |name, _| { + if name == def_name { + name_hit_count += 1; + } + }); + name_hit_count + }); + + let m = sema_scope.module(); + match name_hit_count { + Some(0..=1) | None => m.find_use_path(db.upcast(), *def, prefer_no_std, prefer_prelude), + Some(_) => m.find_use_path_prefixed( + db.upcast(), + *def, + PrefixKind::ByCrate, + prefer_no_std, + prefer_prelude, + ), + } +} + +/// Helper function to get path to `ModuleDef` as string +fn mod_item_path_str( + sema_scope: &SemanticsScope<'_>, + def: &ModuleDef, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Result { + let path = mod_item_path(sema_scope, def, prefer_no_std, prefer_prelude); + path.map(|it| it.display(sema_scope.db.upcast()).to_string()) + .ok_or(DisplaySourceCodeError::PathNotFound) +} + +/// Helper function to get path to `Type` +fn type_path( + sema_scope: &SemanticsScope<'_>, + ty: &Type, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Result { + let db = sema_scope.db; + let m = sema_scope.module(); + + match ty.as_adt() { + Some(adt) => { + let ty_name = ty.display_source_code(db, m.id, true)?; + + let mut path = + mod_item_path(sema_scope, &ModuleDef::Adt(adt), prefer_no_std, prefer_prelude) + .unwrap(); + path.pop_segment(); + let path = path.display(db.upcast()).to_string(); + let res = match 
path.is_empty() { + true => ty_name, + false => format!("{path}::{ty_name}"), + }; + Ok(res) + } + None => ty.display_source_code(db, m.id, true), + } +} + +/// Helper function to filter out generic parameters that are default +fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec { + def.type_or_const_params(db) + .into_iter() + .filter_map(|it| it.as_type_param(db)) + .zip(generics) + .filter(|(tp, arg)| tp.default(db).as_ref() != Some(arg)) + .map(|(_, arg)| arg.clone()) + .collect() +} + +/// Type tree shows how can we get from set of types to some type. +/// +/// Consider the following code as an example +/// ``` +/// fn foo(x: i32, y: bool) -> Option { None } +/// fn bar() { +/// let a = 1; +/// let b = true; +/// let c: Option = _; +/// } +/// ``` +/// If we generate type tree in the place of `_` we get +/// ```txt +/// Option +/// | +/// foo(i32, bool) +/// / \ +/// a: i32 b: bool +/// ``` +/// So in short it pretty much gives us a way to get type `Option` using the items we have in +/// scope. +#[derive(Debug, Clone, Eq, Hash, PartialEq)] +pub enum Expr { + /// Constant + Const(Const), + /// Static variable + Static(Static), + /// Local variable + Local(Local), + /// Constant generic parameter + ConstParam(ConstParam), + /// Well known type (such as `true` for bool) + FamousType { ty: Type, value: &'static str }, + /// Function call (does not take self param) + Function { func: Function, generics: Vec, params: Vec }, + /// Method call (has self param) + Method { func: Function, generics: Vec, target: Box, params: Vec }, + /// Enum variant construction + Variant { variant: Variant, generics: Vec, params: Vec }, + /// Struct construction + Struct { strukt: Struct, generics: Vec, params: Vec }, + /// Struct field access + Field { expr: Box, field: Field }, + /// Passing type as reference (with `&`) + Reference(Box), + /// Indicates possibility of many different options that all evaluate to `ty` + Many(Type), +} + +impl Expr { + /// Generate source code for type tree. + /// + /// Note that trait imports are not added to generated code. + /// To make sure that the code is valid, callee has to also ensure that all the traits listed + /// by `traits_used` method are also imported. + pub fn gen_source_code( + &self, + sema_scope: &SemanticsScope<'_>, + many_formatter: &mut dyn FnMut(&Type) -> String, + prefer_no_std: bool, + prefer_prelude: bool, + ) -> Result { + let db = sema_scope.db; + let mod_item_path_str = |s, def| mod_item_path_str(s, def, prefer_no_std, prefer_prelude); + match self { + Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), + Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), + Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()), + Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()), + Expr::FamousType { value, .. } => Ok(value.to_string()), + Expr::Function { func, params, .. } => { + let args = params + .iter() + .map(|f| { + f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + + match func.as_assoc_item(db).map(|it| it.container(db)) { + Some(container) => { + let container_name = match container { + crate::AssocItemContainer::Trait(trait_) => { + mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))? 
+ } + crate::AssocItemContainer::Impl(imp) => { + let self_ty = imp.self_ty(db); + // Should it be guaranteed that `mod_item_path` always exists? + match self_ty.as_adt().and_then(|adt| { + mod_item_path( + sema_scope, + &adt.into(), + prefer_no_std, + prefer_prelude, + ) + }) { + Some(path) => path.display(sema_scope.db.upcast()).to_string(), + None => self_ty.display(db).to_string(), + } + } + }; + let fn_name = func.name(db).display(db.upcast()).to_string(); + Ok(format!("{container_name}::{fn_name}({args})")) + } + None => { + let fn_name = mod_item_path_str(sema_scope, &ModuleDef::Function(*func))?; + Ok(format!("{fn_name}({args})")) + } + } + } + Expr::Method { func, target, params, .. } => { + if target.contains_many_in_illegal_pos() { + return Ok(many_formatter(&target.ty(db))); + } + + let func_name = func.name(db).display(db.upcast()).to_string(); + let self_param = func.self_param(db).unwrap(); + let target = target.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + let args = params + .iter() + .map(|f| { + f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + + match func.as_assoc_item(db).and_then(|it| it.container_or_implemented_trait(db)) { + Some(trait_) => { + let trait_name = mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?; + let target = match self_param.access(db) { + crate::Access::Shared => format!("&{target}"), + crate::Access::Exclusive => format!("&mut {target}"), + crate::Access::Owned => target, + }; + let res = match args.is_empty() { + true => format!("{trait_name}::{func_name}({target})",), + false => format!("{trait_name}::{func_name}({target}, {args})",), + }; + Ok(res) + } + None => Ok(format!("{target}.{func_name}({args})")), + } + } + Expr::Variant { variant, generics, params } => { + let generics = non_default_generics(db, (*variant).into(), generics); + let generics_str = match generics.is_empty() { + true => String::new(), + false => { + let generics = generics + .iter() + .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("::<{generics}>") + } + }; + let inner = match variant.kind(db) { + StructKind::Tuple => { + let args = params + .iter() + .map(|f| { + f.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + ) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("{generics_str}({args})") + } + StructKind::Record => { + let fields = variant.fields(db); + let args = params + .iter() + .zip(fields.iter()) + .map(|(a, f)| { + let tmp = format!( + "{}: {}", + f.name(db).display(db.upcast()), + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude + )? + ); + Ok(tmp) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("{generics_str}{{ {args} }}") + } + StructKind::Unit => generics_str, + }; + + let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?; + Ok(format!("{prefix}{inner}")) + } + Expr::Struct { strukt, generics, params } => { + let generics = non_default_generics(db, (*strukt).into(), generics); + let inner = match strukt.kind(db) { + StructKind::Tuple => { + let args = params + .iter() + .map(|a| { + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + ) + }) + .collect::, DisplaySourceCodeError>>()? 
+ .into_iter() + .join(", "); + format!("({args})") + } + StructKind::Record => { + let fields = strukt.fields(db); + let args = params + .iter() + .zip(fields.iter()) + .map(|(a, f)| { + let tmp = format!( + "{}: {}", + f.name(db).display(db.upcast()), + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude + )? + ); + Ok(tmp) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!(" {{ {args} }}") + } + StructKind::Unit => match generics.is_empty() { + true => String::new(), + false => { + let generics = generics + .iter() + .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("::<{generics}>") + } + }, + }; + + let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?; + Ok(format!("{prefix}{inner}")) + } + Expr::Field { expr, field } => { + if expr.contains_many_in_illegal_pos() { + return Ok(many_formatter(&expr.ty(db))); + } + + let strukt = expr.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + let field = field.name(db).display(db.upcast()).to_string(); + Ok(format!("{strukt}.{field}")) + } + Expr::Reference(expr) => { + if expr.contains_many_in_illegal_pos() { + return Ok(many_formatter(&expr.ty(db))); + } + + let inner = expr.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + Ok(format!("&{inner}")) + } + Expr::Many(ty) => Ok(many_formatter(ty)), + } + } + + /// Get type of the type tree. + /// + /// Same as getting the type of root node + pub fn ty(&self, db: &dyn HirDatabase) -> Type { + match self { + Expr::Const(it) => it.ty(db), + Expr::Static(it) => it.ty(db), + Expr::Local(it) => it.ty(db), + Expr::ConstParam(it) => it.ty(db), + Expr::FamousType { ty, .. } => ty.clone(), + Expr::Function { func, generics, .. } => { + func.ret_type_with_args(db, generics.iter().cloned()) + } + Expr::Method { func, generics, target, .. } => func.ret_type_with_args( + db, + target.ty(db).type_arguments().chain(generics.iter().cloned()), + ), + Expr::Variant { variant, generics, .. } => { + Adt::from(variant.parent_enum(db)).ty_with_args(db, generics.iter().cloned()) + } + Expr::Struct { strukt, generics, .. } => { + Adt::from(*strukt).ty_with_args(db, generics.iter().cloned()) + } + Expr::Field { expr, field } => field.ty_with_args(db, expr.ty(db).type_arguments()), + Expr::Reference(it) => it.ty(db), + Expr::Many(ty) => ty.clone(), + } + } + + /// List the traits used in type tree + pub fn traits_used(&self, db: &dyn HirDatabase) -> Vec { + let mut res = Vec::new(); + + if let Expr::Method { func, params, .. } = self { + res.extend(params.iter().flat_map(|it| it.traits_used(db))); + if let Some(it) = func.as_assoc_item(db) { + if let Some(it) = it.container_or_implemented_trait(db) { + res.push(it); + } + } + } + + res + } + + /// Check in the tree contains `Expr::Many` variant in illegal place to insert `todo`, + /// `unimplemented` or similar macro + /// + /// Some examples are following + /// ```no_compile + /// macro!().foo + /// macro!().bar() + /// ¯o!() + /// ``` + fn contains_many_in_illegal_pos(&self) -> bool { + match self { + Expr::Method { target, .. } => target.contains_many_in_illegal_pos(), + Expr::Field { expr, .. 
} => expr.contains_many_in_illegal_pos(), + Expr::Reference(target) => target.is_many(), + Expr::Many(_) => true, + _ => false, + } + } + + /// Helper function to check if outermost type tree is `Expr::Many` variant + pub fn is_many(&self) -> bool { + matches!(self, Expr::Many(_)) + } +} diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs new file mode 100644 index 00000000000..666d63ac155 --- /dev/null +++ b/crates/hir/src/term_search/tactics.rs @@ -0,0 +1,859 @@ +//! Tactics for term search +//! +//! All the tactics take following arguments +//! * `ctx` - Context for the term search +//! * `defs` - Set of items in scope at term search target location +//! * `lookup` - Lookup table for types +//! And they return iterator that yields type trees that unify with the `goal` type. + +use std::iter; + +use hir_ty::db::HirDatabase; +use hir_ty::mir::BorrowKind; +use hir_ty::TyBuilder; +use itertools::Itertools; +use rustc_hash::FxHashSet; + +use crate::{ + Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type, + TypeParam, Variant, +}; + +use crate::term_search::{Expr, TermSearchConfig}; + +use super::{LookupTable, NewTypesKey, TermSearchCtx}; + +/// # Trivial tactic +/// +/// Attempts to fulfill the goal by trying items in scope +/// Also works as a starting point to move all items in scope to lookup table. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +/// +/// Returns iterator that yields elements that unify with `goal`. +/// +/// _Note that there is no use of calling this tactic in every iteration as the output does not +/// depend on the current state of `lookup`_ +pub(super) fn trivial<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + defs.iter().filter_map(|def| { + let expr = match def { + ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(Expr::Const(*it)), + ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(Expr::Static(*it)), + ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(Expr::ConstParam(*it)), + ScopeDef::Local(it) => { + if ctx.config.enable_borrowcheck { + let borrowck = db.borrowck(it.parent).ok()?; + + let invalid = borrowck.iter().any(|b| { + b.partially_moved.iter().any(|moved| { + Some(&moved.local) == b.mir_body.binding_locals.get(it.binding_id) + }) || b.borrow_regions.iter().any(|region| { + // Shared borrows are fine + Some(®ion.local) == b.mir_body.binding_locals.get(it.binding_id) + && region.kind != BorrowKind::Shared + }) + }); + + if invalid { + return None; + } + } + + Some(Expr::Local(*it)) + } + _ => None, + }?; + + lookup.mark_exhausted(*def); + + let ty = expr.ty(db); + lookup.insert(ty.clone(), std::iter::once(expr.clone())); + + // Don't suggest local references as they are not valid for return + if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) { + return None; + } + + ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr) + }) +} + +/// # Type constructor tactic +/// +/// Attempts different type constructors for enums and structs in scope +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
+/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn type_constructor<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + fn variant_helper( + db: &dyn HirDatabase, + lookup: &mut LookupTable, + parent_enum: Enum, + variant: Variant, + goal: &Type, + config: &TermSearchConfig, + ) -> Vec<(Type, Vec)> { + // Ignore unstable + if variant.is_unstable(db) { + return Vec::new(); + } + + let generics = GenericDef::from(variant.parent_enum(db)); + let Some(type_params) = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>() + else { + // Ignore enums with const generics + return Vec::new(); + }; + + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return Vec::new(); + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return Vec::new(); + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + generic_params + .filter_map(move |generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| it.default(db).unwrap_or_else(|| g.next().expect("No generic"))) + .collect(); + + let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned()); + + // Allow types with generics only if they take us straight to goal for + // performance reasons + if !generics.is_empty() && !enum_ty.could_unify_with_deeply(db, goal) { + return None; + } + + // Ignore types that have something to do with lifetimes + if config.enable_borrowcheck && enum_ty.contains_reference(db) { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = variant + .fields(db) + .into_iter() + .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let variant_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Variant { variant, generics: generics.clone(), params }) + .collect() + }; + lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned()); + + Some((enum_ty, variant_exprs)) + }) + .collect() + } + defs.iter() + .filter_map(move |def| match def { + ScopeDef::ModuleDef(ModuleDef::Variant(it)) => { + let variant_exprs = + variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.goal, &ctx.config); + if variant_exprs.is_empty() { + return None; + } + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it))); + Some(variant_exprs) + } + ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => { + let exprs: Vec<(Type, Vec)> = enum_ + .variants(db) + .into_iter() + .flat_map(|it| variant_helper(db, lookup, 
*enum_, it, &ctx.goal, &ctx.config)) + .collect(); + + if !exprs.is_empty() { + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_)))); + } + + Some(exprs) + } + ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => { + // Ignore unstable and not visible + if it.is_unstable(db) || !it.is_visible_from(db, module) { + return None; + } + + let generics = GenericDef::from(*it); + + // Ignore const params for now + let type_params = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return None; + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| { + it.default(db) + .unwrap_or_else(|| g.next().expect("Missing type param")) + }) + .collect(); + + let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned()); + + // Allow types with generics only if they take us straight to goal for + // performance reasons + if non_default_type_params_len != 0 + && struct_ty.could_unify_with_deeply(db, &ctx.goal) + { + return None; + } + + // Ignore types that have something to do with lifetimes + if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { + return None; + } + let fileds = it.fields(db); + // Check if all fields are visible, otherwise we cannot fill them + if fileds.iter().any(|it| !it.is_visible_from(db, module)) { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = fileds + .into_iter() + .map(|field| lookup.find(db, &field.ty(db))) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let struct_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Struct { + strukt: *it, + generics: generics.clone(), + params, + }) + .collect() + }; + + lookup + .mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it)))); + lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned()); + + Some((struct_ty, struct_exprs)) + }) + .collect(); + Some(exprs) + } + _ => None, + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Free function tactic +/// +/// Attempts to call different functions in scope with parameters from lookup table. +/// Functions that include generics are not used for performance reasons. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
+/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn free_function<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + defs.iter() + .filter_map(move |def| match def { + ScopeDef::ModuleDef(ModuleDef::Function(it)) => { + let generics = GenericDef::from(*it); + + // Ignore const params for now + let type_params = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore lifetimes as we do not check them + if !generics.lifetime_params(db).is_empty() { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return None; + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 0 { + return None; + } + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args(db, generics.iter().cloned()); + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) + || it.is_unsafe_to_call(db) + || it.is_unstable(db) + || ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args(db, generics.iter().cloned()) + .into_iter() + .map(|field| { + let ty = field.ty(); + match ty.is_mutable_reference() { + true => None, + false => lookup.find_autoref(db, ty), + } + }) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let fn_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Function { func: *it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Function { + func: *it, + generics: generics.clone(), + + params, + }) + .collect() + }; + + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it))); + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + } + _ => None, + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Impl method tactic +/// +/// Attempts to to call methods on types from lookup table. +/// This includes both functions from direct impl blocks as well as functions from traits. 
+/// Methods defined in impl blocks that are generic and methods that are themselves have +/// generics are ignored for performance reasons. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn impl_method<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .new_types(NewTypesKey::ImplMethod) + .into_iter() + .flat_map(|ty| { + Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) + }) + .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item))) + .filter_map(|(imp, ty, it)| match it { + AssocItem::Function(f) => Some((imp, ty, f)), + _ => None, + }) + .filter_map(move |(imp, ty, it)| { + let fn_generics = GenericDef::from(it); + let imp_generics = GenericDef::from(imp); + + // Ignore const params for now + let imp_type_params = imp_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore const params for now + let fn_type_params = fn_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore all functions that have something to do with lifetimes as we don't check them + if !fn_generics.lifetime_params(db).is_empty() { + return None; + } + + // Ignore functions without self param + if !it.has_self_param(db) { + return None; + } + + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + { + return None; + } + + let non_default_type_params_len = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .filter(|it| it.default(db).is_none()) + .count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 0 { + return None; + } + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Ignore functions that do not change the type + if ty.could_unify_with_deeply(db, &ret_ty) { + return None; + } + + let self_ty = it + 
.self_param(db) + .expect("No self param") + .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned())); + + // Ignore functions that have different self type + if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { + return None; + } + + let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup"); + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ) + .into_iter() + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::>()?; + + let fn_exprs: Vec = std::iter::once(target_type_exprs) + .chain(param_exprs) + .multi_cartesian_product() + .map(|params| { + let mut params = params.into_iter(); + let target = Box::new(params.next().unwrap()); + Expr::Method { + func: it, + generics: generics.clone(), + target, + params: params.collect(), + } + }) + .collect(); + + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Struct projection tactic +/// +/// Attempts different struct fields (`foo.bar.baz`) +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn struct_projection<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .new_types(NewTypesKey::StructProjection) + .into_iter() + .map(|ty| (ty.clone(), lookup.find(db, &ty).expect("Expr not in lookup"))) + .flat_map(move |(ty, targets)| { + ty.fields(db).into_iter().filter_map(move |(field, filed_ty)| { + if !field.is_visible_from(db, module) { + return None; + } + let exprs = targets + .clone() + .into_iter() + .map(move |target| Expr::Field { field, expr: Box::new(target) }); + Some((filed_ty, exprs)) + }) + }) + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Famous types tactic +/// +/// Attempts different values of well known types such as `true` or `false`. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
+///
+/// _Note that there is no point in calling it iteratively as the output is always the same_
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn famous_types<'a, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'a, DB>,
+    _defs: &'a FxHashSet<ScopeDef>,
+    lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+    let db = ctx.sema.db;
+    let module = ctx.scope.module();
+    [
+        Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "true" },
+        Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "false" },
+        Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
+    ]
+    .into_iter()
+    .map(|exprs| {
+        lookup.insert(exprs.ty(db), std::iter::once(exprs.clone()));
+        exprs
+    })
+    .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
+}
+
+/// # Impl static method (without self type) tactic
+///
+/// Attempts different functions from impl blocks that take no self parameter.
+///
+/// Updates lookup by new types reached and returns iterator that yields
+/// elements that unify with `goal`.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+pub(super) fn impl_static_method<'a, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'a, DB>,
+    _defs: &'a FxHashSet<ScopeDef>,
+    lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+    let db = ctx.sema.db;
+    let module = ctx.scope.module();
+    lookup
+        .take_types_wishlist()
+        .into_iter()
+        .chain(iter::once(ctx.goal.clone()))
+        .flat_map(|ty| {
+            Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp))
+        })
+        .filter(|(_, imp)| !imp.is_unsafe(db))
+        .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item)))
+        .filter_map(|(imp, ty, it)| match it {
+            AssocItem::Function(f) => Some((imp, ty, f)),
+            _ => None,
+        })
+        .filter_map(move |(imp, ty, it)| {
+            let fn_generics = GenericDef::from(it);
+            let imp_generics = GenericDef::from(imp);
+
+            // Ignore const params for now
+            let imp_type_params = imp_generics
+                .type_or_const_params(db)
+                .into_iter()
+                .map(|it| it.as_type_param(db))
+                .collect::<Option<Vec<TypeParam>>>()?;
+
+            // Ignore const params for now
+            let fn_type_params = fn_generics
+                .type_or_const_params(db)
+                .into_iter()
+                .map(|it| it.as_type_param(db))
+                .collect::<Option<Vec<TypeParam>>>()?;
+
+            // Ignore all functions that have something to do with lifetimes as we don't check them
+            if !fn_generics.lifetime_params(db).is_empty()
+                || !imp_generics.lifetime_params(db).is_empty()
+            {
+                return None;
+            }
+
+            // Ignore functions with self param
+            if it.has_self_param(db) {
+                return None;
+            }
+
+            // Filter out private and unsafe functions
+            if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) {
+                return None;
+            }
+
+            // Only account for stable type parameters for now, unstable params can be default
+            // tho, for example in `Box<T, A>`
+            if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
+                || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none())
+            {
+                return None;
+            }
+
+            let non_default_type_params_len = imp_type_params
+                .iter()
+                .chain(fn_type_params.iter())
+                .filter(|it| it.default(db).is_none())
+                .count();
+
+            // Ignore bigger number of generics for now as they kill the performance
+            if non_default_type_params_len > 1 {
+                return None;
+            }
+
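+            // Try each ordered selection of types from the lookup table as candidate
+            // arguments for the remaining non-default generic parameters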
+            let generic_params = lookup
+                .iter_types()
+                .collect::<Vec<_>>() // Force take ownership
+                .into_iter()
+                .permutations(non_default_type_params_len);
+
+            let exprs: Vec<_> = generic_params
+                .filter_map(|generics| {
+                    // Insert default type params
+                    let mut g = generics.into_iter();
+                    let generics: Vec<_> = imp_type_params
+                        .iter()
+                        .chain(fn_type_params.iter())
+                        .map(|it| match it.default(db) {
+                            Some(ty) => Some(ty),
+                            None => {
+                                let generic = g.next().expect("Missing type param");
+                                it.trait_bounds(db)
+                                    .into_iter()
+                                    .all(|bound| generic.impls_trait(db, bound, &[]));
+                                // Filter out generics that do not unify due to trait bounds
+                                it.ty(db).could_unify_with(db, &generic).then_some(generic)
+                            }
+                        })
+                        .collect::<Option<_>>()?;
+
+                    let ret_ty = it.ret_type_with_args(
+                        db,
+                        ty.type_arguments().chain(generics.iter().cloned()),
+                    );
+                    // Filter out functions that return references
+                    if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db)
+                        || ret_ty.is_raw_ptr()
+                    {
+                        return None;
+                    }
+
+                    // Ignore functions that do not change the type
+                    // if ty.could_unify_with_deeply(db, &ret_ty) {
+                    //     return None;
+                    // }
+
+                    // Early exit if some param cannot be filled from lookup
+                    let param_exprs: Vec<Vec<Expr>> = it
+                        .params_without_self_with_args(
+                            db,
+                            ty.type_arguments().chain(generics.iter().cloned()),
+                        )
+                        .into_iter()
+                        .map(|field| lookup.find_autoref(db, field.ty()))
+                        .collect::<Option<_>>()?;
+
+                    // Note that we need a special case for 0 param constructors because of multi cartesian
+                    // product
+                    let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
+                        vec![Expr::Function { func: it, generics, params: Vec::new() }]
+                    } else {
+                        param_exprs
+                            .into_iter()
+                            .multi_cartesian_product()
+                            .map(|params| Expr::Function {
+                                func: it,
+                                generics: generics.clone(),
+                                params,
+                            })
+                            .collect()
+                    };
+
+                    lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
+                    Some((ret_ty, fn_exprs))
+                })
+                .collect();
+            Some(exprs)
+        })
+        .flatten()
+        .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs))
+        .flatten()
+}
diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs
index 204e796fa2c..589591a6777 100644
--- a/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -79,7 +79,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
     edit.edit_file(target_file);
 
     let vis_owner = edit.make_mut(vis_owner);
-    vis_owner.set_visibility(missing_visibility.clone_for_update());
+    vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
 
     if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
         edit.add_tabstop_before(cap, vis);
@@ -131,7 +131,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
     edit.edit_file(target_file);
 
     let vis_owner = edit.make_mut(vis_owner);
-    vis_owner.set_visibility(missing_visibility.clone_for_update());
+    vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
 
     if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
         edit.add_tabstop_before(cap, vis);
diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
index 24094de22c8..5f7350bc281 100644
--- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs
@@ -1,8 +1,13 @@
 use crate::assist_context::{AssistContext, Assists};
 use 
ide_db::assists::AssistId; use syntax::{ - ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility}, - ted, AstNode, SyntaxKind, + ast::{ + self, + edit_in_place::{HasVisibilityEdit, Indent}, + make, HasGenericParams, HasName, + }, + ted::{self, Position}, + AstNode, SyntaxKind, T, }; // NOTES : @@ -44,7 +49,7 @@ use syntax::{ // }; // } // -// trait ${0:TraitName} { +// trait ${0:NewTrait} { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -53,7 +58,7 @@ use syntax::{ // const_maker! {i32, 7} // } // -// impl ${0:TraitName} for Foo { +// impl ${0:NewTrait} for Foo { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -94,8 +99,10 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ "Generate trait from impl", impl_ast.syntax().text_range(), |builder| { + let impl_ast = builder.make_mut(impl_ast); let trait_items = assoc_items.clone_for_update(); - let impl_items = assoc_items.clone_for_update(); + let impl_items = builder.make_mut(assoc_items); + let impl_name = builder.make_mut(impl_name); trait_items.assoc_items().for_each(|item| { strip_body(&item); @@ -112,46 +119,42 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ impl_ast.generic_param_list(), impl_ast.where_clause(), trait_items, - ); + ) + .clone_for_update(); + + let trait_name = trait_ast.name().expect("new trait should have a name"); + let trait_name_ref = make::name_ref(&trait_name.to_string()).clone_for_update(); // Change `impl Foo` to `impl NewTrait for Foo` - let arg_list = if let Some(genpars) = impl_ast.generic_param_list() { - genpars.to_generic_args().to_string() - } else { - "".to_owned() - }; + let mut elements = vec![ + trait_name_ref.syntax().clone().into(), + make::tokens::single_space().into(), + make::token(T![for]).into(), + ]; - if let Some(snippet_cap) = ctx.config.snippet_cap { - builder.replace_snippet( - snippet_cap, - impl_name.syntax().text_range(), - format!("${{0:TraitName}}{} for {}", arg_list, impl_name), - ); - - // Insert trait before TraitImpl - builder.insert_snippet( - snippet_cap, - impl_ast.syntax().text_range().start(), - format!( - "{}\n\n{}", - trait_ast.to_string().replace("NewTrait", "${0:TraitName}"), - IndentLevel::from_node(impl_ast.syntax()) - ), - ); - } else { - builder.replace( - impl_name.syntax().text_range(), - format!("NewTrait{} for {}", arg_list, impl_name), - ); - - // Insert trait before TraitImpl - builder.insert( - impl_ast.syntax().text_range().start(), - format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())), - ); + if let Some(params) = impl_ast.generic_param_list() { + let gen_args = ¶ms.to_generic_args().clone_for_update(); + elements.insert(1, gen_args.syntax().clone().into()); } - builder.replace(assoc_items.syntax().text_range(), impl_items.to_string()); + ted::insert_all(Position::before(impl_name.syntax()), elements); + + // Insert trait before TraitImpl + ted::insert_all_raw( + Position::before(impl_ast.syntax()), + vec![ + trait_ast.syntax().clone().into(), + make::tokens::whitespace(&format!("\n\n{}", impl_ast.indent_level())).into(), + ], + ); + + // Link the trait name & trait ref names together as a placeholder snippet group + if let Some(cap) = ctx.config.snippet_cap { + builder.add_placeholder_snippet_group( + cap, + vec![trait_name.syntax().clone(), trait_name_ref.syntax().clone()], + ); + } }, ); @@ -160,23 +163,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ 
/// `E0449` Trait items always share the visibility of their trait fn remove_items_visibility(item: &ast::AssocItem) { - match item { - ast::AssocItem::Const(c) => { - if let Some(vis) = c.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::Fn(f) => { - if let Some(vis) = f.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::TypeAlias(t) => { - if let Some(vis) = t.visibility() { - ted::remove(vis.syntax()); - } - } - _ => (), + if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { + has_vis.set_visibility(None); } } @@ -404,12 +392,12 @@ impl F$0oo { r#" struct Foo([i32; N]); -trait ${0:TraitName} { +trait ${0:NewTrait} { // Used as an associated constant. const CONST: usize = N * 4; } -impl ${0:TraitName} for Foo { +impl ${0:NewTrait} for Foo { // Used as an associated constant. const CONST: usize = N * 4; } diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs new file mode 100644 index 00000000000..51a1a406f31 --- /dev/null +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -0,0 +1,253 @@ +//! Term search assist +use hir::term_search::TermSearchCtx; +use ide_db::{ + assists::{AssistId, AssistKind, GroupLabel}, + famous_defs::FamousDefs, +}; + +use itertools::Itertools; +use syntax::{ast, AstNode}; + +use crate::assist_context::{AssistContext, Assists}; + +pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let unexpanded = ctx.find_node_at_offset::()?; + let syntax = unexpanded.syntax(); + let goal_range = syntax.text_range(); + + let parent = syntax.parent()?; + let scope = ctx.sema.scope(&parent)?; + + let macro_call = ctx.sema.resolve_macro_call(&unexpanded)?; + + let famous_defs = FamousDefs(&ctx.sema, scope.krate()); + let std_todo = famous_defs.core_macros_todo()?; + let std_unimplemented = famous_defs.core_macros_unimplemented()?; + + if macro_call != std_todo && macro_call != std_unimplemented { + return None; + } + + let target_ty = ctx.sema.type_of_expr(&ast::Expr::cast(parent.clone())?)?.adjusted(); + + let term_search_ctx = TermSearchCtx { + sema: &ctx.sema, + scope: &scope, + goal: target_ty, + config: Default::default(), + }; + let paths = hir::term_search::term_search(&term_search_ctx); + + if paths.is_empty() { + return None; + } + + let mut formatter = |_: &hir::Type| String::from("todo!()"); + + let paths = paths + .into_iter() + .filter_map(|path| { + path.gen_source_code( + &scope, + &mut formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) + .ok() + }) + .unique(); + + for code in paths { + acc.add_group( + &GroupLabel(String::from("Term search")), + AssistId("term_search", AssistKind::Generate), + format!("Replace todo!() with {code}"), + goal_range, + |builder| { + builder.replace(goal_range, code); + }, + ); + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn test_complete_local() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!() }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_todo_with_msg() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_unimplemented_with_msg() { + check_assist( + term_search, + r#"//- 
minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_unimplemented() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_struct_field() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +struct A { pub x: i32, y: bool } +fn f() { let a = A { x: 1, y: true }; let b: i32 = todo$0!(); }"#, + r#"struct A { pub x: i32, y: bool } +fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, + ) + } + + #[test] + fn test_enum_with_generics() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented, option +fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, + r#"fn f() { let a: i32 = 1; let b: Option = None; }"#, + ) + } + + #[test] + fn test_enum_with_generics2() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Option { None, Some(T) } +fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, + r#"enum Option { None, Some(T) } +fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, + ) + } + + #[test] + fn test_enum_with_generics3() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Option { None, Some(T) } +fn f() { let a: Option = Option::None; let b: Option> = todo$0!(); }"#, + r#"enum Option { None, Some(T) } +fn f() { let a: Option = Option::None; let b: Option> = Option::Some(a); }"#, + ) + } + + #[test] + fn test_enum_with_generics4() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Foo { Foo(T) } +fn f() { let a = 0; let b: Foo = todo$0!(); }"#, + r#"enum Foo { Foo(T) } +fn f() { let a = 0; let b: Foo = Foo::Foo(a); }"#, + ); + + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Foo { Foo(T) } +fn f() { let a: Foo = Foo::Foo(0); let b: Foo = todo$0!(); }"#, + r#"enum Foo { Foo(T) } +fn f() { let a: Foo = Foo::Foo(0); let b: Foo = a; }"#, + ) + } + + #[test] + fn test_newtype() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +struct Foo(i32); +fn f() { let a: i32 = 1; let b: Foo = todo$0!(); }"#, + r#"struct Foo(i32); +fn f() { let a: i32 = 1; let b: Foo = Foo(a); }"#, + ) + } + + #[test] + fn test_shadowing() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!(); }"#, + r#"fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = b; }"#, + ) + } + + #[test] + fn test_famous_bool() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: bool = todo$0!(); }"#, + r#"fn f() { let a: bool = false; }"#, + ) + } + + #[test] + fn test_fn_with_reference_types() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = 1; let b: f32 = todo$0!(); }"#, + r#"fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = 1; let b: f32 = f(&a); }"#, + ) + } + + #[test] + fn test_fn_with_reference_types2() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = &1; let b: f32 = todo$0!(); }"#, + r#"fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = &1; let b: f32 = f(a); }"#, + ) + } + + #[test] + fn test_fn_with_reference_types3() { + check_assist_not_applicable( 
+ term_search, + r#"//- minicore: todo, unimplemented + fn f(a: &i32) -> f32 { a as f32 } + fn g() { let a = &mut 1; let b: f32 = todo$0!(); }"#, + ) + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 2fec104323d..dcc89014b95 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -60,11 +60,6 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -#[allow(unused)] -macro_rules! eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod assist_config; mod assist_context; #[cfg(test)] @@ -210,6 +205,7 @@ mod handlers { mod replace_turbofish_with_explicit_type; mod sort_items; mod split_import; + mod term_search; mod toggle_ignore; mod unmerge_match_arm; mod unmerge_use; @@ -332,6 +328,7 @@ mod handlers { replace_arith_op::replace_arith_with_saturating, sort_items::sort_items, split_import::split_import, + term_search::term_search, toggle_ignore::toggle_ignore, unmerge_match_arm::unmerge_match_arm, unmerge_use::unmerge_use, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 8ad735d0ae8..268ba3225b6 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -1665,7 +1665,7 @@ macro_rules! const_maker { }; } -trait ${0:TraitName} { +trait ${0:NewTrait} { // Used as an associated constant. const CONST_ASSOC: usize = N * 4; @@ -1674,7 +1674,7 @@ trait ${0:TraitName} { const_maker! {i32, 7} } -impl ${0:TraitName} for Foo { +impl ${0:NewTrait} for Foo { // Used as an associated constant. const CONST_ASSOC: usize = N * 4; diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index ba3c0cf3fd6..1ea7220960d 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -40,7 +40,8 @@ use crate::{ literal::{render_struct_literal, render_variant_lit}, macro_::render_macro, pattern::{render_struct_pat, render_variant_pat}, - render_field, render_path_resolution, render_pattern_resolution, render_tuple_field, + render_expr, render_field, render_path_resolution, render_pattern_resolution, + render_tuple_field, type_alias::{render_type_alias, render_type_alias_with_eq}, union_literal::render_union_literal, RenderContext, @@ -157,6 +158,12 @@ impl Completions { item.add_to(self, ctx.db); } + pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) { + if let Some(item) = render_expr(ctx, expr) { + item.add_to(self, ctx.db) + } + } + pub(crate) fn add_crate_roots( &mut self, ctx: &CompletionContext<'_>, @@ -694,6 +701,7 @@ pub(super) fn complete_name_ref( match &path_ctx.kind { PathKind::Expr { expr_ctx } => { expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx); + expr::complete_expr(acc, ctx); dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx); item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx); diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 77fd5dd98b8..802e9bc3a80 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -328,3 +328,59 @@ pub(crate) fn complete_expr_path( } } } + +pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) { + let _p = tracing::span!(tracing::Level::INFO, "complete_expr").entered(); + + if !ctx.config.enable_term_search { + return; + } + + if !ctx.qualifier_ctx.none() { + return; + } + + if let Some(ty) = &ctx.expected_type 
{ + // Ignore unit types as they are not very interesting + if ty.is_unit() || ty.is_unknown() { + return; + } + + let term_search_ctx = hir::term_search::TermSearchCtx { + sema: &ctx.sema, + scope: &ctx.scope, + goal: ty.clone(), + config: hir::term_search::TermSearchConfig { + enable_borrowcheck: false, + many_alternatives_threshold: 1, + depth: 6, + }, + }; + let exprs = hir::term_search::term_search(&term_search_ctx); + for expr in exprs { + // Expand method calls + match expr { + hir::term_search::Expr::Method { func, generics, target, params } + if target.is_many() => + { + let target_ty = target.ty(ctx.db); + let term_search_ctx = + hir::term_search::TermSearchCtx { goal: target_ty, ..term_search_ctx }; + let target_exprs = hir::term_search::term_search(&term_search_ctx); + + for expr in target_exprs { + let expanded_expr = hir::term_search::Expr::Method { + func, + generics: generics.clone(), + target: Box::new(expr), + params: params.clone(), + }; + + acc.add_expr(ctx, &expanded_expr) + } + } + _ => acc.add_expr(ctx, &expr), + } + } + } +} diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index b9f91d34b2c..3bc329ecd74 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -238,6 +238,8 @@ fn import_on_the_fly( (PathKind::Type { location }, ItemInNs::Types(ty)) => { if matches!(location, TypeLocation::TypeBound) { matches!(ty, ModuleDef::Trait(_)) + } else if matches!(location, TypeLocation::ImplTrait) { + matches!(ty, ModuleDef::Trait(_) | ModuleDef::Module(_)) } else { true } diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index e6a4335c3fe..e4678089462 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -31,6 +31,11 @@ pub(crate) fn complete_type_path( ScopeDef::ImplSelfType(_) => location.complete_self_type(), // Don't suggest attribute macros and derives. 
ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db), + ScopeDef::ModuleDef(Trait(_) | Module(_)) + if matches!(location, TypeLocation::ImplTrait) => + { + true + } // Type things are fine ScopeDef::ModuleDef( BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_), @@ -184,6 +189,21 @@ pub(crate) fn complete_type_path( } } } + TypeLocation::ImplTrait => { + acc.add_nameref_keywords_with_colon(ctx); + ctx.process_all_names(&mut |name, def, doc_aliases| { + let is_trait_or_module = matches!( + def, + ScopeDef::ModuleDef( + hir::ModuleDef::Module(_) | hir::ModuleDef::Trait(_) + ) + ); + if is_trait_or_module { + acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases); + } + }); + return; + } _ => {} }; diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs index ed5ddde8fbf..04563fb0f46 100644 --- a/crates/ide-completion/src/config.rs +++ b/crates/ide-completion/src/config.rs @@ -14,6 +14,7 @@ pub struct CompletionConfig { pub enable_imports_on_the_fly: bool, pub enable_self_on_the_fly: bool, pub enable_private_editable: bool, + pub enable_term_search: bool, pub full_function_signatures: bool, pub callable: Option, pub snippet_cap: Option, diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 2a0004f60b8..aa22155feff 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -202,6 +202,7 @@ impl TypeLocation { } TypeLocation::AssocConstEq => false, TypeLocation::AssocTypeEq => true, + TypeLocation::ImplTrait => false, _ => true, } } @@ -716,7 +717,7 @@ impl<'a> CompletionContext<'a> { let krate = scope.krate(); let module = scope.module(); - let toolchain = db.crate_graph()[krate.into()].channel(); + let toolchain = db.toolchain_channel(krate.into()); // `toolchain == None` means we're in some detached files. Since we have no information on // the toolchain being used, let's just allow unstable items to be listed. let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None); diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 8552a20392a..c2c0641961a 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -166,6 +166,8 @@ pub struct CompletionRelevance { pub postfix_match: Option, /// This is set for type inference results pub is_definite: bool, + /// This is set for items that are function (associated or method) + pub function: Option, } #[derive(Debug, Clone, Copy, Eq, PartialEq)] @@ -207,6 +209,24 @@ pub enum CompletionRelevancePostfixMatch { Exact, } +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub struct CompletionRelevanceFn { + pub has_params: bool, + pub has_self_param: bool, + pub return_type: CompletionRelevanceReturnType, +} + +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub enum CompletionRelevanceReturnType { + Other, + /// Returns the Self type of the impl/trait + DirectConstructor, + /// Returns something that indirectly constructs the `Self` type of the impl/trait e.g. `Result`, `Option` + Constructor, + /// Returns a possible builder for the type + Builder, +} + impl CompletionRelevance { /// Provides a relevance score. Higher values are more relevant. 
/// @@ -231,6 +251,7 @@ impl CompletionRelevance { postfix_match, is_definite, is_item_from_notable_trait, + function, } = self; // lower rank private things @@ -275,6 +296,33 @@ impl CompletionRelevance { if is_definite { score += 10; } + + score += function + .map(|asf| { + let mut fn_score = match asf.return_type { + CompletionRelevanceReturnType::DirectConstructor => 15, + CompletionRelevanceReturnType::Builder => 10, + CompletionRelevanceReturnType::Constructor => 5, + CompletionRelevanceReturnType::Other => 0, + }; + + // When a fn is bumped due to return type: + // Bump Constructor or Builder methods with no arguments, + // over them tha with self arguments + if fn_score > 0 { + if !asf.has_params { + // bump associated functions + fn_score += 1; + } else if asf.has_self_param { + // downgrade methods (below Constructor) + fn_score = 1; + } + } + + fn_score + }) + .unwrap_or_default(); + score } @@ -297,6 +345,7 @@ pub enum CompletionItemKind { Method, Snippet, UnresolvedReference, + Expression, } impl_from!(SymbolKind for CompletionItemKind); @@ -341,6 +390,7 @@ impl CompletionItemKind { CompletionItemKind::Method => "me", CompletionItemKind::Snippet => "sn", CompletionItemKind::UnresolvedReference => "??", + CompletionItemKind::Expression => "ex", } } } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 2ed080a8347..3f374b307fb 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -17,7 +17,7 @@ use ide_db::{ imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind, }; -use syntax::{format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange}; +use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange}; use text_edit::TextEdit; use crate::{ @@ -272,6 +272,82 @@ pub(crate) fn render_resolution_with_import_pat( Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution)) } +pub(crate) fn render_expr( + ctx: &CompletionContext<'_>, + expr: &hir::term_search::Expr, +) -> Option { + let mut i = 1; + let mut snippet_formatter = |ty: &hir::Type| { + let arg_name = ty + .as_adt() + .and_then(|adt| adt.name(ctx.db).as_text()) + .map(|s| stdx::to_lower_snake_case(s.as_str())) + .unwrap_or_else(|| String::from("_")); + let res = format!("${{{i}:{arg_name}}}"); + i += 1; + res + }; + + let mut label_formatter = |ty: &hir::Type| { + ty.as_adt() + .and_then(|adt| adt.name(ctx.db).as_text()) + .map(|s| stdx::to_lower_snake_case(s.as_str())) + .unwrap_or_else(|| String::from("...")) + }; + + let label = expr + .gen_source_code( + &ctx.scope, + &mut label_formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) + .ok()?; + + let source_range = match ctx.original_token.parent() { + Some(node) => match node.ancestors().find_map(ast::Path::cast) { + Some(path) => path.syntax().text_range(), + None => node.text_range(), + }, + None => ctx.source_range(), + }; + + let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label.clone()); + + let snippet = format!( + "{}$0", + expr.gen_source_code( + &ctx.scope, + &mut snippet_formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude + ) + .ok()? 
+ ); + let edit = TextEdit::replace(source_range, snippet); + item.snippet_edit(ctx.config.snippet_cap?, edit); + item.documentation(Documentation::new(String::from("Autogenerated expression by term search"))); + item.set_relevance(crate::CompletionRelevance { + type_match: compute_type_match(ctx, &expr.ty(ctx.db)), + ..Default::default() + }); + for trait_ in expr.traits_used(ctx.db) { + let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_)); + let Some(path) = ctx.module.find_use_path( + ctx.db, + trait_item, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) else { + continue; + }; + + item.add_import(LocatedImport::new(path, trait_item, trait_item)); + } + + Some(item) +} + fn scope_def_to_name( resolution: ScopeDef, ctx: &RenderContext<'_>, @@ -599,6 +675,16 @@ mod tests { expect.assert_debug_eq(&actual); } + #[track_caller] + fn check_function_relevance(ra_fixture: &str, expect: Expect) { + let actual: Vec<_> = do_completion(ra_fixture, CompletionItemKind::Method) + .into_iter() + .map(|item| (item.detail.unwrap_or_default(), item.relevance.function)) + .collect(); + + expect.assert_debug_eq(&actual); + } + #[track_caller] fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) { let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None); @@ -961,6 +1047,7 @@ fn func(input: Struct) { } st Self [type] sp Self [type] st Struct [type] + ex Struct [type] lc self [local] fn func(…) [] me self.test() [] @@ -985,6 +1072,9 @@ fn main() { "#, expect![[r#" lc input [type+name+local] + ex input [type] + ex true [type] + ex false [type] lc inputbad [local] fn main() [] fn test(…) [] @@ -1174,6 +1264,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, trigger_call_info: true, }, @@ -1201,6 +1292,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, trigger_call_info: true, }, @@ -1280,6 +1372,7 @@ fn foo() { A { the$0 } } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] @@ -1313,6 +1406,26 @@ impl S { documentation: Documentation( "Method docs", ), + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, }, CompletionItem { label: "foo", @@ -1418,6 +1531,26 @@ fn foo(s: S) { s.$0 } kind: Method, lookup: "the_method", detail: "fn(&self)", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, }, ] "#]], @@ -1665,6 +1798,10 @@ fn f() { A { bar: b$0 }; } expect![[r#" fn bar() [type+name] fn baz() [type] + ex baz() [type] + ex bar() [type] + ex A { bar: baz() }.bar [type] + ex A { bar: bar() }.bar [type] st A [] fn f() [] "#]], @@ -1749,6 +1886,8 @@ fn main() { lc s 
[type+name+local] st S [type] st S [type] + ex s [type] + ex S [type] fn foo(…) [] fn main() [] "#]], @@ -1766,6 +1905,8 @@ fn main() { lc ssss [type+local] st S [type] st S [type] + ex ssss [type] + ex S [type] fn foo(…) [] fn main() [] "#]], @@ -1798,6 +1939,8 @@ fn main() { } "#, expect![[r#" + ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] + ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify] lc m [local] lc t [local] lc &t [type+local] @@ -1846,6 +1989,8 @@ fn main() { } "#, expect![[r#" + ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify] + ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify] lc m [local] lc t [local] lc &mut t [type+local] @@ -1894,6 +2039,8 @@ fn bar(t: Foo) {} ev Foo::A [type] ev Foo::B [type] en Foo [type] + ex Foo::A [type] + ex Foo::B [type] fn bar(…) [] fn foo() [] "#]], @@ -1947,6 +2094,8 @@ fn main() { } "#, expect![[r#" + ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] + ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify] st S [] st &S [type] st S [] @@ -2002,6 +2151,254 @@ fn main() { ); } + #[test] + fn constructor_order_simple() { + check_relevance( + r#" +struct Foo; +struct Other; +struct Option(T); + +impl Foo { + fn fn_ctr() -> Foo { unimplemented!() } + fn fn_another(n: u32) -> Other { unimplemented!() } + fn fn_ctr_self() -> Option { unimplemented!() } +} + +fn test() { + let a = Foo::$0; +} +"#, + expect![[r#" + fn fn_ctr() [type_could_unify] + fn fn_ctr_self() [type_could_unify] + fn fn_another(…) [type_could_unify] + "#]], + ); + } + + #[test] + fn constructor_order_kind() { + check_function_relevance( + r#" +struct Foo; +struct Bar; +struct Option(T); +enum Result { Ok(T), Err(E) }; + +impl Foo { + fn fn_ctr(&self) -> Foo { unimplemented!() } + fn fn_ctr_with_args(&self, n: u32) -> Foo { unimplemented!() } + fn fn_another(&self, n: u32) -> Bar { unimplemented!() } + fn fn_ctr_wrapped(&self, ) -> Option { unimplemented!() } + fn fn_ctr_wrapped_2(&self, ) -> Result { unimplemented!() } + fn fn_ctr_wrapped_3(&self, ) -> Result { unimplemented!() } // Self is not the first type + fn fn_ctr_wrapped_with_args(&self, m: u32) -> Option { unimplemented!() } + fn fn_another_unit(&self) { unimplemented!() } +} + +fn test() { + let a = self::Foo::$0; +} +"#, + expect![[r#" + [ + ( + "fn(&self, u32) -> Bar", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + ), + ( + "fn(&self)", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + ), + ( + "fn(&self) -> Foo", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: DirectConstructor, + }, + ), + ), + ( + "fn(&self, u32) -> Foo", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: DirectConstructor, + }, + ), + ), + ( + "fn(&self) -> Option", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self) -> Result", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self) -> Result", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self, u32) -> Option", + Some( + CompletionRelevanceFn { + has_params: true, 
+ has_self_param: true, + return_type: Constructor, + }, + ), + ), + ] + "#]], + ); + } + + #[test] + fn constructor_order_relevance() { + check_relevance( + r#" +struct Foo; +struct FooBuilder; +struct Result(T); + +impl Foo { + fn fn_no_ret(&self) {} + fn fn_ctr_with_args(input: u32) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr() -> Result { unimplemented!() } + fn fn_other() -> Result { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a = self::Foo::$0; +} +"#, + // preference: + // Direct Constructor + // Direct Constructor with args + // Builder + // Constructor + // Others + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr() [type_could_unify] + me fn_no_ret(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + + // + } + + #[test] + fn function_relevance_generic_1() { + check_relevance( + r#" +struct Foo(T); +struct FooBuilder; +struct Option(T); +enum Result{Ok(T), Err(E)}; + +impl Foo { + fn fn_returns_unit(&self) {} + fn fn_ctr_with_args(input: T) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr_wrapped() -> Option { unimplemented!() } + fn fn_ctr_wrapped_2() -> Result { unimplemented!() } + fn fn_other() -> Option { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a = self::Foo::::$0; +} + "#, + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr_wrapped() [type_could_unify] + fn fn_ctr_wrapped_2() [type_could_unify] + me fn_returns_unit(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + } + + #[test] + fn function_relevance_generic_2() { + // Generic 2 + check_relevance( + r#" +struct Foo(T); +struct FooBuilder; +struct Option(T); +enum Result{Ok(T), Err(E)}; + +impl Foo { + fn fn_no_ret(&self) {} + fn fn_ctr_with_args(input: T) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr() -> Option { unimplemented!() } + fn fn_ctr2() -> Result { unimplemented!() } + fn fn_other() -> Option { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a : Res> = Foo::$0; +} + "#, + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr() [type_could_unify] + fn fn_ctr2() [type_could_unify] + me fn_no_ret(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + } + #[test] fn struct_field_method_ref() { check_kinds( @@ -2022,6 +2419,26 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } kind: Method, lookup: "baz", detail: "fn(&self) -> u32", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, ref_match: "&@107", }, CompletionItem { @@ -2096,6 +2513,7 @@ fn foo() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] @@ -2133,6 +2551,26 @@ fn main() { ), lookup: "foo", detail: "fn() -> 
S", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: false, + has_self_param: false, + return_type: Other, + }, + ), + }, ref_match: "&@92", }, ] @@ -2160,6 +2598,7 @@ fn foo() { "#, expect![[r#" lc foo [type+local] + ex foo [type] ev Foo::A(…) [type_could_unify] ev Foo::B [type_could_unify] en Foo [type_could_unify] @@ -2493,6 +2932,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, CompletionItem { @@ -2515,6 +2955,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 27186a2b7ff..cf9fe1ab307 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -8,8 +8,13 @@ use syntax::{format_smolstr, AstNode, SmolStr}; use crate::{ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind}, - item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance}, - render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext}, + item::{ + Builder, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevanceFn, + CompletionRelevanceReturnType, + }, + render::{ + compute_exact_name_match, compute_ref_match, compute_type_match, match_types, RenderContext, + }, CallableSnippets, }; @@ -61,9 +66,9 @@ fn render( ), _ => (name.unescaped().to_smol_str(), name.to_smol_str()), }; - + let has_self_param = func.self_param(db).is_some(); let mut item = CompletionItem::new( - if func.self_param(db).is_some() { + if has_self_param { CompletionItemKind::Method } else { CompletionItemKind::SymbolKind(SymbolKind::Function) @@ -99,6 +104,15 @@ fn render( .filter(|_| !has_call_parens) .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?))); + let function = assoc_item + .and_then(|assoc_item| assoc_item.implementing_ty(db)) + .map(|self_type| compute_return_type_match(db, &ctx, self_type, &ret_type)) + .map(|return_type| CompletionRelevanceFn { + has_params: has_self_param || func.num_params(db) > 0, + has_self_param, + return_type, + }); + item.set_relevance(CompletionRelevance { type_match: if has_call_parens || complete_call_parens.is_some() { compute_type_match(completion, &ret_type) @@ -106,6 +120,7 @@ fn render( compute_type_match(completion, &func.ty(db)) }, exact_name_match: compute_exact_name_match(completion, &call), + function, is_op_method, is_item_from_notable_trait, ..ctx.completion_relevance() @@ -156,6 +171,33 @@ fn render( item } +fn compute_return_type_match( + db: &dyn HirDatabase, + ctx: &RenderContext<'_>, + self_type: hir::Type, + ret_type: &hir::Type, +) -> CompletionRelevanceReturnType { + if match_types(ctx.completion, &self_type, ret_type).is_some() { + // fn([..]) -> Self + CompletionRelevanceReturnType::DirectConstructor + } else if ret_type + .type_arguments() + .any(|ret_type_arg| match_types(ctx.completion, &self_type, &ret_type_arg).is_some()) + { + // fn([..]) -> Result OR Wrapped + CompletionRelevanceReturnType::Constructor + } else if ret_type + .as_adt() + .and_then(|adt| 
adt.name(db).as_str().map(|name| name.ends_with("Builder"))) + .unwrap_or(false) + { + // fn([..]) -> [..]Builder + CompletionRelevanceReturnType::Builder + } else { + CompletionRelevanceReturnType::Other + } +} + pub(super) fn add_call_parens<'b>( builder: &'b mut Builder, ctx: &CompletionContext<'_>, diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 154b69875ae..1f032c7df48 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -65,6 +65,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: false, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 78907a2896c..7749fac40b9 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -97,6 +97,11 @@ fn func(param0 @ (param1, param2): (i32, i32)) { kw unsafe kw while kw while let + ex ifletlocal + ex letlocal + ex matcharm + ex param1 + ex param2 "#]], ); } @@ -241,6 +246,8 @@ fn complete_in_block() { sn macro_rules sn pd sn ppd + ex false + ex true "#]], ) } @@ -542,7 +549,26 @@ fn quux(x: i32) { m!(x$0 } "#, - expect![[r#""#]], + expect![[r#" + fn quux(…) fn(i32) + lc x i32 + lc y i32 + ma m!(…) macro_rules! m + bt u32 u32 + kw crate:: + kw false + kw for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], ); } @@ -682,7 +708,9 @@ fn main() { } "#, expect![[r#" - fn test() fn() -> Zulu + fn test() fn() -> Zulu + ex Zulu + ex Zulu::test() "#]], ); } diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index eaa1bebc03c..fff193ba4c9 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -1397,3 +1397,22 @@ pub use bridge2::server2::Span2; "#]], ); } + +#[test] +fn flyimport_only_traits_in_impl_trait_block() { + check( + r#" +//- /main.rs crate:main deps:dep +pub struct Bar; + +impl Foo$0 for Bar { } +//- /lib.rs crate:dep +pub trait FooTrait; + +pub struct FooStruct; +"#, + expect![[r#" + tt FooTrait (use dep::FooTrait) + "#]], + ); +} diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index 18afde1b7ce..e64ec74c610 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -192,6 +192,8 @@ fn main() { bt u32 u32 kw crate:: kw self:: + ex Foo::default() + ex foo "#]], ); check( diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index a87d16c789f..ff32eccfbff 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -225,10 +225,10 @@ impl S { fn foo() { let _ = lib::S::$0 } "#, expect![[r#" - ct PUBLIC_CONST pub const PUBLIC_CONST: u32 - fn public_method() fn() - ta PublicType pub type PublicType = u32 - "#]], + ct PUBLIC_CONST pub const PUBLIC_CONST: u32 + fn public_method() fn() + ta PublicType pub type PublicType = u32 + "#]], ); } @@ -242,8 +242,8 @@ impl U { fn m() { } } fn foo() { let _ = U::$0 } "#, expect![[r#" - fn m() fn() - "#]], + fn m() fn() + "#]], ); } @@ -256,8 +256,8 @@ trait Trait { fn m(); } fn foo() { let _ = 
Trait::$0 } "#, expect![[r#" - fn m() (as Trait) fn() - "#]], + fn m() (as Trait) fn() + "#]], ); } @@ -273,8 +273,8 @@ impl Trait for S {} fn foo() { let _ = S::$0 } "#, expect![[r#" - fn m() (as Trait) fn() - "#]], + fn m() (as Trait) fn() + "#]], ); } @@ -290,8 +290,8 @@ impl Trait for S {} fn foo() { let _ = ::$0 } "#, expect![[r#" - fn m() (as Trait) fn() - "#]], + fn m() (as Trait) fn() + "#]], ); } @@ -396,9 +396,9 @@ macro_rules! foo { () => {} } fn main() { let _ = crate::$0 } "#, expect![[r#" - fn main() fn() - ma foo!(…) macro_rules! foo - "#]], + fn main() fn() + ma foo!(…) macro_rules! foo + "#]], ); } @@ -694,8 +694,10 @@ fn bar() -> Bar { } "#, expect![[r#" - fn foo() (as Foo) fn() -> Self - "#]], + fn foo() (as Foo) fn() -> Self + ex Bar + ex bar() + "#]], ); } @@ -722,6 +724,8 @@ fn bar() -> Bar { expect![[r#" fn bar() fn() fn foo() (as Foo) fn() -> Self + ex Bar + ex bar() "#]], ); } @@ -748,6 +752,8 @@ fn bar() -> Bar { "#, expect![[r#" fn foo() (as Foo) fn() -> Self + ex Bar + ex bar() "#]], ); } diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index c7161f82ce7..db4ac9381ce 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -989,3 +989,43 @@ fn foo<'a>() { S::<'static, F$0, _, _>; } "#]], ); } + +#[test] +fn complete_traits_on_impl_trait_block() { + check( + r#" +trait Foo {} + +struct Bar; + +impl $0 for Bar { } +"#, + expect![[r#" + md module + tt Foo + tt Trait + kw crate:: + kw self:: + "#]], + ); +} + +#[test] +fn complete_traits_with_path_on_impl_trait_block() { + check( + r#" +mod outer { + pub trait Foo {} + pub struct Bar; + pub mod inner { + } +} + +impl outer::$0 for Bar { } +"#, + expect![[r#" + md inner + tt Foo + "#]], + ); +} diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs index 4edfa37b329..3106772e63b 100644 --- a/crates/ide-db/src/famous_defs.rs +++ b/crates/ide-db/src/famous_defs.rs @@ -114,6 +114,14 @@ impl FamousDefs<'_, '_> { self.find_function("core:mem:drop") } + pub fn core_macros_todo(&self) -> Option { + self.find_macro("core:todo") + } + + pub fn core_macros_unimplemented(&self) -> Option { + self.find_macro("core:unimplemented") + } + pub fn builtin_crates(&self) -> impl Iterator { IntoIterator::into_iter([ self.std(), diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs index 3862acc2af4..7e1811b4cac 100644 --- a/crates/ide-db/src/path_transform.rs +++ b/crates/ide-db/src/path_transform.rs @@ -148,7 +148,7 @@ impl<'a> PathTransform<'a> { let mut defaulted_params: Vec = Default::default(); self.generic_def .into_iter() - .flat_map(|it| it.type_params(db)) + .flat_map(|it| it.type_or_const_params(db)) .skip(skip) // The actual list of trait type parameters may be longer than the one // used in the `impl` block due to trailing default type parameters. diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 032b8338ab8..6a7042988a9 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -71,7 +71,6 @@ impl Definition { &self, sema: &Semantics<'_, RootDatabase>, new_name: &str, - rename_external: bool, ) -> Result { // self.krate() returns None if // self is a built-in attr, built-in type or tool module. @@ -80,8 +79,8 @@ impl Definition { if let Some(krate) = self.krate(sema.db) { // Can we not rename non-local items? 
// Then bail if non-local - if !rename_external && !krate.origin(sema.db).is_local() { - bail!("Cannot rename a non-local definition as the config for it is disabled") + if !krate.origin(sema.db).is_local() { + bail!("Cannot rename a non-local definition") } } diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs index 73be6a4071e..f59d8d08c89 100644 --- a/crates/ide-db/src/source_change.rs +++ b/crates/ide-db/src/source_change.rs @@ -138,7 +138,7 @@ impl SnippetEdit { .into_iter() .zip(1..) .with_position() - .map(|pos| { + .flat_map(|pos| { let (snippet, index) = match pos { (itertools::Position::First, it) | (itertools::Position::Middle, it) => it, // last/only snippet gets index 0 @@ -146,11 +146,13 @@ impl SnippetEdit { | (itertools::Position::Only, (snippet, _)) => (snippet, 0), }; - let range = match snippet { - Snippet::Tabstop(pos) => TextRange::empty(pos), - Snippet::Placeholder(range) => range, - }; - (index, range) + match snippet { + Snippet::Tabstop(pos) => vec![(index, TextRange::empty(pos))], + Snippet::Placeholder(range) => vec![(index, range)], + Snippet::PlaceholderGroup(ranges) => { + ranges.into_iter().map(|range| (index, range)).collect() + } + } }) .collect_vec(); @@ -248,7 +250,7 @@ impl SourceChangeBuilder { fn commit(&mut self) { let snippet_edit = self.snippet_builder.take().map(|builder| { SnippetEdit::new( - builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(), + builder.places.into_iter().flat_map(PlaceSnippet::finalize_position).collect(), ) }); @@ -287,30 +289,10 @@ impl SourceChangeBuilder { pub fn insert(&mut self, offset: TextSize, text: impl Into) { self.edit.insert(offset, text.into()) } - /// Append specified `snippet` at the given `offset` - pub fn insert_snippet( - &mut self, - _cap: SnippetCap, - offset: TextSize, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.insert(offset, snippet); - } /// Replaces specified `range` of text with a given string. pub fn replace(&mut self, range: TextRange, replace_with: impl Into) { self.edit.replace(range, replace_with.into()) } - /// Replaces specified `range` of text with a given `snippet`. - pub fn replace_snippet( - &mut self, - _cap: SnippetCap, - range: TextRange, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.replace(range, snippet); - } pub fn replace_ast(&mut self, old: N, new: N) { algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) } @@ -356,6 +338,17 @@ impl SourceChangeBuilder { self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into())) } + /// Adds a snippet to move the cursor selected over `nodes` + /// + /// This allows for renaming newly generated items without having to go + /// through a separate rename step. + pub fn add_placeholder_snippet_group(&mut self, _cap: SnippetCap, nodes: Vec) { + assert!(nodes.iter().all(|node| node.parent().is_some())); + self.add_snippet(PlaceSnippet::OverGroup( + nodes.into_iter().map(|node| node.into()).collect(), + )) + } + fn add_snippet(&mut self, snippet: PlaceSnippet) { let snippet_builder = self.snippet_builder.get_or_insert(SnippetBuilder { places: vec![] }); snippet_builder.places.push(snippet); @@ -400,6 +393,13 @@ pub enum Snippet { Tabstop(TextSize), /// A placeholder snippet (e.g. `${0:placeholder}`). Placeholder(TextRange), + /// A group of placeholder snippets, e.g. 
+ /// + /// ```no_run + /// let ${0:new_var} = 4; + /// fun(1, 2, 3, ${0:new_var}); + /// ``` + PlaceholderGroup(Vec), } enum PlaceSnippet { @@ -409,14 +409,20 @@ enum PlaceSnippet { After(SyntaxElement), /// Place a placeholder snippet in place of the element Over(SyntaxElement), + /// Place a group of placeholder snippets which are linked together + /// in place of the elements + OverGroup(Vec), } impl PlaceSnippet { - fn finalize_position(self) -> Snippet { + fn finalize_position(self) -> Vec { match self { - PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()), - PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()), - PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()), + PlaceSnippet::Before(it) => vec![Snippet::Tabstop(it.text_range().start())], + PlaceSnippet::After(it) => vec![Snippet::Tabstop(it.text_range().end())], + PlaceSnippet::Over(it) => vec![Snippet::Placeholder(it.text_range())], + PlaceSnippet::OverGroup(it) => { + vec![Snippet::PlaceholderGroup(it.into_iter().map(|it| it.text_range()).collect())] + } } } } diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index e4e735cecd8..4f706e26af2 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -329,6 +329,7 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::RecordExpr(_) | ast::Expr::RefExpr(_) | ast::Expr::ReturnExpr(_) + | ast::Expr::BecomeExpr(_) | ast::Expr::TryExpr(_) | ast::Expr::TupleExpr(_) | ast::Expr::LetExpr(_) diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index dd64b93e454..5e2541795ca 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option {} + // ^^^^ error: expected (bool, bool), found bool + // ^^^^^ error: expected (bool, bool), found bool + None => {} + } +} + "#, + ); + } + #[test] fn mismatched_types_in_or_patterns() { cov_mark::check_count!(validate_match_bailed_out, 2); diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index a0d5d742d36..b7667dc318f 100644 --- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -182,6 +182,18 @@ fn foo() -> u8 { ); } + #[test] + fn no_diagnostic_if_not_last_statement2() { + check_diagnostics( + r#" +fn foo() -> u8 { + return 2; + fn bar() {} +} +"#, + ); + } + #[test] fn replace_with_expr() { check_fix( diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index e93eea8ce29..8c97281b783 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -112,7 +112,8 @@ fn add_missing_ok_or_some( let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" }; - let wrapped_actual_ty = expected_adt.ty_with_args(ctx.sema.db, &[d.actual.clone()]); + let wrapped_actual_ty = + expected_adt.ty_with_args(ctx.sema.db, std::iter::once(d.actual.clone())); if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) { return None; diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs 
b/crates/ide-diagnostics/src/handlers/typed_hole.rs index 6441343ebac..56c8181e84c 100644 --- a/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -1,14 +1,20 @@ -use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, StructKind}; +use hir::{ + db::ExpandDatabase, + term_search::{term_search, TermSearchCtx}, + ClosureStyle, HirDisplay, +}; use ide_db::{ assists::{Assist, AssistId, AssistKind, GroupLabel}, label::Label, source_change::SourceChange, }; -use syntax::AstNode; +use itertools::Itertools; use text_edit::TextEdit; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use syntax::AstNode; + // Diagnostic: typed-hole // // This diagnostic is triggered when an underscore expression is used in an invalid position. @@ -36,50 +42,54 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option let (original_range, _) = d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?; let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?; - let mut assists = vec![]; - scope.process_all_names(&mut |name, def| { - let ty = match def { - hir::ScopeDef::ModuleDef(it) => match it { - hir::ModuleDef::Function(it) => it.ty(db), - hir::ModuleDef::Adt(hir::Adt::Struct(it)) if it.kind(db) != StructKind::Record => { - it.constructor_ty(db) - } - hir::ModuleDef::Variant(it) if it.kind(db) != StructKind::Record => { - it.constructor_ty(db) - } - hir::ModuleDef::Const(it) => it.ty(db), - hir::ModuleDef::Static(it) => it.ty(db), - _ => return, - }, - hir::ScopeDef::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db), - hir::ScopeDef::Local(it) => it.ty(db), - _ => return, - }; - // FIXME: should also check coercions if it is at a coercion site - if !ty.contains_unknown() && ty.could_unify_with(db, &d.expected) { - assists.push(Assist { - id: AssistId("typed-hole", AssistKind::QuickFix), - label: Label::new(format!("Replace `_` with `{}`", name.display(db))), - group: Some(GroupLabel("Replace `_` with a matching entity in scope".to_owned())), - target: original_range.range, - source_change: Some(SourceChange::from_text_edit( - original_range.file_id, - TextEdit::replace(original_range.range, name.display(db).to_string()), - )), - trigger_signature_help: false, - }); - } - }); - if assists.is_empty() { - None - } else { + + let term_search_ctx = TermSearchCtx { + sema: &ctx.sema, + scope: &scope, + goal: d.expected.clone(), + config: Default::default(), + }; + let paths = term_search(&term_search_ctx); + + let mut formatter = |_: &hir::Type| String::from("_"); + + let assists: Vec = paths + .into_iter() + .filter_map(|path| { + path.gen_source_code( + &scope, + &mut formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) + .ok() + }) + .unique() + .map(|code| Assist { + id: AssistId("typed-hole", AssistKind::QuickFix), + label: Label::new(format!("Replace `_` with `{}`", &code)), + group: Some(GroupLabel("Replace `_` with a term".to_owned())), + target: original_range.range, + source_change: Some(SourceChange::from_text_edit( + original_range.file_id, + TextEdit::replace(original_range.range, code), + )), + trigger_signature_help: false, + }) + .collect(); + + if !assists.is_empty() { Some(assists) + } else { + None } } #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fixes}; + use crate::tests::{ + check_diagnostics, check_fixes_unordered, check_has_fix, check_has_single_fix, + }; #[test] fn unknown() { @@ -99,7 +109,7 @@ fn main() { r#" fn main() { if 
_ {} - //^ error: invalid `_` expression, expected type `bool` + //^ 💡 error: invalid `_` expression, expected type `bool` let _: fn() -> i32 = _; //^ error: invalid `_` expression, expected type `fn() -> i32` let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion* @@ -129,7 +139,7 @@ fn main() { fn main() { let mut x = t(); x = _; - //^ 💡 error: invalid `_` expression, expected type `&str` + //^ error: invalid `_` expression, expected type `&str` x = ""; } fn t() -> T { loop {} } @@ -143,7 +153,8 @@ fn t() -> T { loop {} } r#" fn main() { let _x = [(); _]; - let _y: [(); 10] = [(); _]; + // FIXME: This should trigger error + // let _y: [(); 10] = [(); _]; _ = 0; (_,) = (1,); } @@ -153,7 +164,7 @@ fn main() { #[test] fn check_quick_fix() { - check_fixes( + check_fixes_unordered( r#" enum Foo { Bar @@ -173,6 +184,18 @@ enum Foo { } use Foo::Bar; const C: Foo = Foo::Bar; +fn main(param: Foo) { + let local = Foo::Bar; + let _: Foo = Bar; + //^ error: invalid `_` expression, expected type `fn()` +} +"#, + r#" +enum Foo { + Bar +} +use Foo::Bar; +const C: Foo = Foo::Bar; fn main(param: Foo) { let local = Foo::Bar; let _: Foo = local; @@ -209,18 +232,6 @@ enum Foo { } use Foo::Bar; const C: Foo = Foo::Bar; -fn main(param: Foo) { - let local = Foo::Bar; - let _: Foo = Bar; - //^ error: invalid `_` expression, expected type `fn()` -} -"#, - r#" -enum Foo { - Bar -} -use Foo::Bar; -const C: Foo = Foo::Bar; fn main(param: Foo) { let local = Foo::Bar; let _: Foo = C; @@ -230,4 +241,153 @@ fn main(param: Foo) { ], ); } + + #[test] + fn local_item_use_trait() { + check_has_fix( + r#" +struct Bar; +struct Baz; +trait Foo { + fn foo(self) -> Bar; +} +impl Foo for Baz { + fn foo(self) -> Bar { + unimplemented!() + } +} +fn asd() -> Bar { + let a = Baz; + _$0 +} +"#, + r" +struct Bar; +struct Baz; +trait Foo { + fn foo(self) -> Bar; +} +impl Foo for Baz { + fn foo(self) -> Bar { + unimplemented!() + } +} +fn asd() -> Bar { + let a = Baz; + Foo::foo(a) +} +", + ); + } + + #[test] + fn init_struct() { + check_has_fix( + r#"struct Abc {} +struct Qwe { a: i32, b: Abc } +fn main() { + let a: i32 = 1; + let c: Qwe = _$0; +}"#, + r#"struct Abc {} +struct Qwe { a: i32, b: Abc } +fn main() { + let a: i32 = 1; + let c: Qwe = Qwe { a: a, b: Abc { } }; +}"#, + ); + } + + #[test] + fn ignore_impl_func_with_incorrect_return() { + check_has_single_fix( + r#" +struct Bar {} +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for i32 { + type Res = Self; + fn foo(&self) -> Self::Res { 1 } +} +fn main() { + let a: i32 = 1; + let c: Bar = _$0; +}"#, + r#" +struct Bar {} +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for i32 { + type Res = Self; + fn foo(&self) -> Self::Res { 1 } +} +fn main() { + let a: i32 = 1; + let c: Bar = Bar { }; +}"#, + ); + } + + #[test] + fn use_impl_func_with_correct_return() { + check_has_fix( + r#" +struct Bar {} +struct A; +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for A { + type Res = Bar; + fn foo(&self) -> Self::Res { Bar { } } +} +fn main() { + let a = A; + let c: Bar = _$0; +}"#, + r#" +struct Bar {} +struct A; +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for A { + type Res = Bar; + fn foo(&self) -> Self::Res { Bar { } } +} +fn main() { + let a = A; + let c: Bar = Foo::foo(&a); +}"#, + ); + } + + #[test] + fn local_shadow_fn() { + check_fixes_unordered( + r#" +fn f() { + let f: i32 = 0; + _$0 +}"#, + vec![ + r#" +fn f() { + let f: i32 = 0; + () +}"#, + r#" +fn 
f() { + let f: i32 = 0; + crate::f() +}"#, + ], + ); + } } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index b62bb5affdd..4e4a851f67e 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -91,6 +91,91 @@ fn check_nth_fix_with_config( assert_eq_text!(&after, &actual); } +pub(crate) fn check_fixes_unordered(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) { + for ra_fixture_after in ra_fixtures_after.iter() { + check_has_fix(ra_fixture_before, ra_fixture_after) + } +} + +#[track_caller] +pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (db, file_position) = RootDatabase::with_position(ra_fixture_before); + let mut conf = DiagnosticsConfig::test_sample(); + conf.expr_fill_default = ExprFillDefaultMode::Default; + let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); + + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); + assert!(fix.is_some(), "no diagnostic with desired fix"); +} + +#[track_caller] +pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (db, file_position) = RootDatabase::with_position(ra_fixture_before); + let mut conf = DiagnosticsConfig::test_sample(); + conf.expr_fill_default = ExprFillDefaultMode::Default; + let mut n_fixes = 0; + let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + n_fixes += fixes.len(); + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); + + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); + assert!(fix.is_some(), "no diagnostic with desired fix"); + assert!(n_fixes == 1, "Too many fixes suggested"); +} + /// Checks that there's a diagnostic *without* fix at `$0`. 
pub(crate) fn check_no_fix(ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index dbe6a5507cc..18821bd78bf 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -501,7 +501,7 @@ fn get_doc_base_urls( let Some(krate) = def.krate(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() }; let crate_data = &db.crate_graph()[krate.into()]; - let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str(); + let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str(); let (web_base, local_base) = match &crate_data.origin { // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 30bfe6ee9dc..69ddc1e45ef 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -7263,8 +7263,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6157..6365, - focus_range: 6222..6228, + full_range: 6290..6498, + focus_range: 6355..6361, name: "Future", kind: Trait, container_name: "future", @@ -7277,8 +7277,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6995..7461, - focus_range: 7039..7047, + full_range: 7128..7594, + focus_range: 7172..7180, name: "Iterator", kind: Trait, container_name: "iterator", diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index effdbf2c1f0..3238887257a 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -12,11 +12,6 @@ #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "128"] -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - #[cfg(test)] mod fixture; @@ -258,11 +253,11 @@ impl Analysis { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("Analysis::from_single_file has no target layout".into()), - None, ); change.change_file(file_id, Some(Arc::from(text))); change.set_crate_graph(crate_graph); + change.set_target_data_layouts(vec![Err("fixture has no layout".into())]); + change.set_toolchains(vec![None]); host.apply_change(change); (host.analysis(), file_id) } @@ -680,9 +675,8 @@ impl Analysis { &self, position: FilePosition, new_name: &str, - rename_external: bool, ) -> Cancellable> { - self.with_db(|db| rename::rename(db, position, new_name, rename_external)) + self.with_db(|db| rename::rename(db, position, new_name)) } pub fn prepare_rename( diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs index 413dbf9c5df..f67aea2d5b9 100644 --- a/crates/ide/src/parent_module.rs +++ b/crates/ide/src/parent_module.rs @@ -54,7 +54,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec Vec { db.relevant_crates(file_id) .iter() diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index 9fce4bb0f82..f2eedfa4316 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -84,7 +84,6 @@ pub(crate) fn rename( db: &RootDatabase, position: FilePosition, new_name: &str, - rename_external: bool, ) -> RenameResult { let sema = Semantics::new(db); let source_file = sema.parse(position.file_id); @@ -104,7 +103,7 @@ pub(crate) fn rename( return rename_to_self(&sema, local); } } - def.rename(&sema, new_name, rename_external) + def.rename(&sema, new_name) }) .collect(); @@ -123,9 +122,9 @@ pub(crate) fn will_rename_file( let module = sema.to_module_def(file_id)?; let def = Definition::Module(module); let mut change = if is_raw_identifier(new_name_stem) { - def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem]), true).ok()? + def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()? } else { - def.rename(&sema, new_name_stem, true).ok()? + def.rename(&sema, new_name_stem).ok()? 
}; change.file_system_edits.clear(); Some(change) @@ -377,16 +376,11 @@ mod tests { use super::{RangeInfo, RenameError}; fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { - check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after, true); + check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after); } #[track_caller] - fn check_with_rename_config( - new_name: &str, - ra_fixture_before: &str, - ra_fixture_after: &str, - rename_external: bool, - ) { + fn check_with_rename_config(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { let ra_fixture_after = &trim_indent(ra_fixture_after); let (analysis, position) = fixture::position(ra_fixture_before); if !ra_fixture_after.starts_with("error: ") { @@ -395,7 +389,7 @@ mod tests { } } let rename_result = analysis - .rename(position, new_name, rename_external) + .rename(position, new_name) .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}")); match rename_result { Ok(source_change) => { @@ -426,10 +420,8 @@ mod tests { fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); - let source_change = analysis - .rename(position, new_name, true) - .unwrap() - .expect("Expect returned a RenameError"); + let source_change = + analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError"); expect.assert_eq(&filter_expect(source_change)) } @@ -2636,19 +2628,7 @@ pub struct S; //- /main.rs crate:main deps:lib new_source_root:local use lib::S$0; "#, - "error: Cannot rename a non-local definition as the config for it is disabled", - false, - ); - - check( - "Baz", - r#" -//- /lib.rs crate:lib new_source_root:library -pub struct S; -//- /main.rs crate:main deps:lib new_source_root:local -use lib::S$0; -"#, - "use lib::Baz;\n", + "error: Cannot rename a non-local definition", ); } @@ -2663,8 +2643,7 @@ use core::hash::Hash; #[derive(H$0ash)] struct A; "#, - "error: Cannot rename a non-local definition as the config for it is disabled", - false, + "error: Cannot rename a non-local definition", ); } diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index bf6ad47a495..453d1836e16 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -39,8 +39,6 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.env.clone(), data.is_proc_macro, data.origin.clone(), - data.target_layout.clone(), - data.toolchain.clone(), ); new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); map.insert(old_id, new_id); diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index dee5afbf8d9..5feaf21aa97 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -1,14 +1,16 @@ //! This module provides `StaticIndex` which is used for powering //! 
read-only code browsers and emitting LSIF -use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, defs::Definition, + documentation::Documentation, + famous_defs::FamousDefs, helpers::get_definition, FxHashMap, FxHashSet, RootDatabase, }; -use syntax::{AstNode, SyntaxKind::*, TextRange, T}; +use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; use crate::inlay_hints::InlayFieldsToResolve; use crate::navigation_target::UpmappingResult; @@ -22,7 +24,7 @@ use crate::{ /// A static representation of fully analyzed source code. /// -/// The intended use-case is powering read-only code browsers and emitting LSIF +/// The intended use-case is powering read-only code browsers and emitting LSIF/SCIP. #[derive(Debug)] pub struct StaticIndex<'a> { pub files: Vec, @@ -40,6 +42,7 @@ pub struct ReferenceData { #[derive(Debug)] pub struct TokenStaticData { + pub documentation: Option, pub hover: Option, pub definition: Option, pub references: Vec, @@ -103,6 +106,19 @@ fn all_modules(db: &dyn HirDatabase) -> Vec { modules } +fn documentation_for_definition( + sema: &Semantics<'_, RootDatabase>, + def: Definition, + scope_node: &SyntaxNode, +) -> Option { + let famous_defs = match &def { + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), + _ => None, + }; + + def.docs(sema.db, famous_defs.as_ref()) +} + impl StaticIndex<'_> { fn add_file(&mut self, file_id: FileId) { let current_crate = crates_for(self.db, file_id).pop().map(Into::into); @@ -169,6 +185,7 @@ impl StaticIndex<'_> { *it } else { let it = self.tokens.insert(TokenStaticData { + documentation: documentation_for_definition(&sema, def, &node), hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { FileRange { file_id: it.file_id, range: it.focus_or_full_range() } diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 3321a0513b6..c3d85e38936 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -72,8 +72,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { dependencies, origin, is_proc_macro, - target_layout, - toolchain, } = &crate_graph[crate_id]; format_to!( buf, @@ -91,12 +89,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { format_to!(buf, " Env: {:?}\n", env); format_to!(buf, " Origin: {:?}\n", origin); format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro); - format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout); - format_to!( - buf, - " Workspace Toolchain: {}\n", - toolchain.as_ref().map_or_else(|| "n/a".into(), |v| v.to_string()) - ); let deps = dependencies .iter() .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw())) diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index c6dc071c394..8c5592da63e 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -2,7 +2,7 @@ //! for incorporating changes. // Note, don't remove any public api from this. This API is consumed by external tools // to run rust-analyzer as a library. 
-use std::{collections::hash_map::Entry, mem, path::Path, sync}; +use std::{collections::hash_map::Entry, iter, mem, path::Path, sync}; use crossbeam_channel::{unbounded, Receiver}; use hir_expand::proc_macro::{ @@ -18,7 +18,6 @@ use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; use span::Span; -use tt::DelimSpan; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; pub struct LoadCargoConfig { @@ -68,9 +67,9 @@ pub fn load_workspace( let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() - .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)), + .and_then(|it| ProcMacroServer::spawn(it, extra_env).map_err(Into::into)), ProcMacroServerChoice::Explicit(path) => { - ProcMacroServer::spawn(path.clone()).map_err(Into::into) + ProcMacroServer::spawn(path.clone(), extra_env).map_err(Into::into) } ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")), }; @@ -107,7 +106,7 @@ pub fn load_workspace( .collect() }; - let project_folders = ProjectFolders::new(&[ws], &[]); + let project_folders = ProjectFolders::new(std::slice::from_ref(&ws), &[]); loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![], @@ -115,6 +114,7 @@ pub fn load_workspace( }); let host = load_crate_graph( + &ws, crate_graph, proc_macros, project_folders.source_root_config, @@ -273,7 +273,7 @@ impl SourceRootConfig { pub fn load_proc_macro( server: &ProcMacroServer, path: &AbsPath, - dummy_replace: &[Box], + ignored_macros: &[Box], ) -> ProcMacroLoadResult { let res: Result, String> = (|| { let dylib = MacroDylib::new(path.to_path_buf()); @@ -283,7 +283,7 @@ pub fn load_proc_macro( } Ok(vec .into_iter() - .map(|expander| expander_to_proc_macro(expander, dummy_replace)) + .map(|expander| expander_to_proc_macro(expander, ignored_macros)) .collect()) })(); match res { @@ -302,6 +302,7 @@ pub fn load_proc_macro( } fn load_crate_graph( + ws: &ProjectWorkspace, crate_graph: CrateGraph, proc_macros: ProcMacros, source_root_config: SourceRootConfig, @@ -340,8 +341,17 @@ fn load_crate_graph( let source_roots = source_root_config.partition(vfs); analysis_change.set_roots(source_roots); + let num_crates = crate_graph.len(); analysis_change.set_crate_graph(crate_graph); analysis_change.set_proc_macros(proc_macros); + if let ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. 
} = ws + { + analysis_change.set_target_data_layouts( + iter::repeat(target_layout.clone()).take(num_crates).collect(), + ); + analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); + } host.apply_change(analysis_change); host @@ -349,7 +359,7 @@ fn load_crate_graph( fn expander_to_proc_macro( expander: proc_macro_api::ProcMacro, - dummy_replace: &[Box], + ignored_macros: &[Box], ) -> ProcMacro { let name = From::from(expander.name()); let kind = match expander.kind() { @@ -357,16 +367,8 @@ fn expander_to_proc_macro( proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike, proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, }; - let expander: sync::Arc = - if dummy_replace.iter().any(|replace| **replace == name) { - match kind { - ProcMacroKind::Attr => sync::Arc::new(IdentityExpander), - _ => sync::Arc::new(EmptyExpander), - } - } else { - sync::Arc::new(Expander(expander)) - }; - ProcMacro { name, kind, expander } + let disabled = ignored_macros.iter().any(|replace| **replace == name); + ProcMacro { name, kind, expander: sync::Arc::new(Expander(expander)), disabled } } #[derive(Debug)] @@ -391,42 +393,6 @@ impl ProcMacroExpander for Expander { } } -/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user. -#[derive(Debug)] -struct IdentityExpander; - -impl ProcMacroExpander for IdentityExpander { - fn expand( - &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - _: Span, - _: Span, - _: Span, - ) -> Result, ProcMacroExpansionError> { - Ok(subtree.clone()) - } -} - -/// Empty expander, used for proc-macros that are deliberately ignored by the user. -#[derive(Debug)] -struct EmptyExpander; - -impl ProcMacroExpander for EmptyExpander { - fn expand( - &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - call_site: Span, - _: Span, - _: Span, - ) -> Result, ProcMacroExpansionError> { - Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site })) - } -} - #[cfg(test)] mod tests { use ide_db::base_db::SourceDatabase; diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 9291f799cca..6d3055da286 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -101,10 +101,20 @@ impl Bindings { }))) } MetaVarKind::Lifetime => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_static("'missing"), - span, - }))) + Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(span), + token_trees: Box::new([ + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + char: '\'', + span, + spacing: tt::Spacing::Joint, + })), + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_static("missing"), + span, + })), + ]), + })) } MetaVarKind::Literal => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index bfc5d197f68..3c270e30a9b 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -700,10 +700,12 @@ impl SynToken { } impl SrcToken, S> for SynToken { - fn kind(&self, ctx: &Converter) -> SyntaxKind { + fn kind(&self, _ctx: &Converter) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), - SynToken::Punct { .. 
} => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), + SynToken::Punct { token, offset: i } => { + SyntaxKind::from_char(token.text().chars().nth(*i).unwrap()).unwrap() + } SynToken::Leaf(_) => { never!(); SyntaxKind::ERROR diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index f40c515fa07..6b660180f82 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -678,27 +678,38 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { attributes::outer_attrs(p); match p.current() { - IDENT | INT_NUMBER => { + IDENT | INT_NUMBER if p.nth_at(1, T![::]) => { // test_err record_literal_missing_ellipsis_recovery // fn main() { // S { S::default() } // } - if p.nth_at(1, T![::]) { - m.abandon(p); - p.expect(T![..]); - expr(p); - } else { + m.abandon(p); + p.expect(T![..]); + expr(p); + } + IDENT | INT_NUMBER => { + if p.nth_at(1, T![..]) { // test_err record_literal_before_ellipsis_recovery // fn main() { // S { field ..S::default() } // } - if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { + name_ref_or_index(p); + p.error("expected `:`"); + } else { + // test_err record_literal_field_eq_recovery + // fn main() { + // S { field = foo } + // } + if p.nth_at(1, T![:]) { name_ref_or_index(p); - p.expect(T![:]); + p.bump(T![:]); + } else if p.nth_at(1, T![=]) { + name_ref_or_index(p); + p.err_and_bump("expected `:`"); } expr(p); - m.complete(p, RECORD_EXPR_FIELD); } + m.complete(p, RECORD_EXPR_FIELD); } T![.] if p.at(T![..]) => { m.abandon(p); diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 4197f248e0a..48600641ad0 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -58,6 +58,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T![match], T![move], T![return], + T![become], T![static], T![try], T![unsafe], @@ -102,6 +103,7 @@ pub(super) fn atom_expr( T![try] => try_block_expr(p, None), T![match] => match_expr(p), T![return] => return_expr(p), + T![become] => become_expr(p), T![yield] => yield_expr(p), T![do] if p.nth_at_contextual_kw(1, T![yeet]) => yeet_expr(p), T![continue] => continue_expr(p), @@ -621,6 +623,18 @@ fn return_expr(p: &mut Parser<'_>) -> CompletedMarker { m.complete(p, RETURN_EXPR) } +// test become_expr +// fn foo() { +// become foo(); +// } +fn become_expr(p: &mut Parser<'_>) -> CompletedMarker { + assert!(p.at(T![become])); + let m = p.start(); + p.bump(T![become]); + expr(p); + m.complete(p, BECOME_EXPR) +} + // test yield_expr // fn foo() { // yield; diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs index 3c577aa3cb4..4498daf21a3 100644 --- a/crates/parser/src/grammar/generic_params.rs +++ b/crates/parser/src/grammar/generic_params.rs @@ -157,6 +157,16 @@ fn type_bound(p: &mut Parser<'_>) -> bool { p.bump_any(); p.expect(T![const]); } + // test const_trait_bound + // const fn foo(_: impl const Trait) {} + T![const] => { + p.bump_any(); + } + // test async_trait_bound + // fn async_foo(_: impl async Fn(&i32)) {} + T![async] => { + p.bump_any(); + } _ => (), } if paths::is_use_path_start(p) { diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index 39ded41bb24..50367423379 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -323,6 +323,15 @@ fn record_pat_field(p: &mut Parser<'_>) { p.bump(T![:]); pattern(p); } + // 
test_err record_pat_field_eq_recovery + // fn main() { + // let S { field = foo }; + // } + IDENT | INT_NUMBER if p.nth(1) == T![=] => { + name_ref_or_index(p); + p.err_and_bump("expected `:`"); + pattern(p); + } T![box] => { // FIXME: not all box patterns should be allowed box_pat(p); diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 4b589037672..6ecfdc9f466 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -90,6 +90,7 @@ pub enum SyntaxKind { PUB_KW, REF_KW, RETURN_KW, + BECOME_KW, SELF_KW, SELF_TYPE_KW, STATIC_KW, @@ -195,6 +196,7 @@ pub enum SyntaxKind { BLOCK_EXPR, STMT_LIST, RETURN_EXPR, + BECOME_EXPR, YIELD_EXPR, YEET_EXPR, LET_EXPR, @@ -307,6 +309,7 @@ impl SyntaxKind { | PUB_KW | REF_KW | RETURN_KW + | BECOME_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW @@ -425,6 +428,7 @@ impl SyntaxKind { "pub" => PUB_KW, "ref" => REF_KW, "return" => RETURN_KW, + "become" => BECOME_KW, "self" => SELF_KW, "Self" => SELF_TYPE_KW, "static" => STATIC_KW, @@ -496,4 +500,4 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind 
:: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] 
=> { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } diff --git a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast index f511960040d..741b7845e7f 100644 --- a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast @@ -24,26 +24,26 @@ SOURCE_FILE RECORD_EXPR_FIELD NAME_REF IDENT "field" - WHITESPACE " " - RANGE_EXPR - DOT2 ".." - CALL_EXPR - PATH_EXPR - PATH - PATH - PATH_SEGMENT - NAME_REF - IDENT "S" - COLON2 "::" - PATH_SEGMENT - NAME_REF - IDENT "default" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + WHITESPACE " " + DOT2 ".." 
+ CALL_EXPR + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "default" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 25: expected COLON +error 25: expected `:` +error 25: expected COMMA diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast new file mode 100644 index 00000000000..ad4deeb0b67 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast @@ -0,0 +1,41 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 26: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs new file mode 100644 index 00000000000..1eb1aa9b926 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + S { field = foo } +} diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast new file mode 100644 index 00000000000..6940a84b683 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + LET_STMT + LET_KW "let" + WHITESPACE " " + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 30: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs new file mode 100644 index 00000000000..c4949d6e12e --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + let S { field = foo }; +} diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast new file mode 100644 index 00000000000..c544cf4e5e3 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast @@ -0,0 +1,31 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + BECOME_EXPR + BECOME_KW "become" + WHITESPACE " " + CALL_EXPR + PATH_EXPR + 
PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs new file mode 100644 index 00000000000..918a83ca6e8 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs @@ -0,0 +1,3 @@ +fn foo() { + become foo(); +} diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast new file mode 100644 index 00000000000..ebf758286a7 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "async_foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + ASYNC_KW "async" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Fn" + PARAM_LIST + L_PAREN "(" + PARAM + REF_TYPE + AMP "&" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + R_PAREN ")" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs new file mode 100644 index 00000000000..04d44175d77 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs @@ -0,0 +1 @@ +fn async_foo(_: impl async Fn(&i32)) {} diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast new file mode 100644 index 00000000000..646873881bc --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast @@ -0,0 +1,34 @@ +SOURCE_FILE + FN + CONST_KW "const" + WHITESPACE " " + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + CONST_KW "const" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs new file mode 100644 index 00000000000..8eb8f84c91f --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs @@ -0,0 +1 @@ +const fn foo(_: impl const Trait) {} diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 1dadfc40ac4..6b16711a8d8 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -13,6 +13,7 @@ mod version; use indexmap::IndexSet; use paths::AbsPathBuf; +use rustc_hash::FxHashMap; use span::Span; use std::{ fmt, io, @@ -107,8 +108,11 @@ pub struct MacroPanic { impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. 
- pub fn spawn(process_path: AbsPathBuf) -> io::Result { - let process = ProcMacroProcessSrv::run(process_path)?; + pub fn spawn( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { + let process = ProcMacroProcessSrv::run(process_path, env)?; Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) }) } diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 96f97bf5e20..12eafcea442 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -7,6 +7,7 @@ use std::{ }; use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashMap; use stdx::JodChild; use crate::{ @@ -26,9 +27,12 @@ pub(crate) struct ProcMacroProcessSrv { } impl ProcMacroProcessSrv { - pub(crate) fn run(process_path: AbsPathBuf) -> io::Result { + pub(crate) fn run( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { let create_srv = |null_stderr| { - let mut process = Process::run(process_path.clone(), null_stderr)?; + let mut process = Process::run(process_path.clone(), env, null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { @@ -147,8 +151,12 @@ struct Process { } impl Process { - fn run(path: AbsPathBuf, null_stderr: bool) -> io::Result { - let child = JodChild(mk_child(&path, null_stderr)?); + fn run( + path: AbsPathBuf, + env: &FxHashMap, + null_stderr: bool, + ) -> io::Result { + let child = JodChild(mk_child(&path, env, null_stderr)?); Ok(Process { child }) } @@ -161,9 +169,14 @@ impl Process { } } -fn mk_child(path: &AbsPath, null_stderr: bool) -> io::Result { +fn mk_child( + path: &AbsPath, + env: &FxHashMap, + null_stderr: bool, +) -> io::Result { let mut cmd = Command::new(path.as_os_str()); - cmd.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") + cmd.envs(env) + .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(if null_stderr { Stdio::null() } else { Stdio::inherit() }); diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index ba17ea6f7b4..bd7a3165458 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -29,6 +29,7 @@ paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true +ra-ap-rustc_lexer.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 460a96c07f3..831632c64c0 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -20,6 +20,11 @@ extern crate proc_macro; #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_lexer as rustc_lexer; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_lexer; + mod dylib; mod proc_macros; mod server; diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index 8a9d52a37a2..c6a0a666555 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -70,11 +70,58 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; + + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = tokens.next().unwrap_or(Token 
{ kind: TokenKind::Eof, len: 0 }); + + let lit = if minus_or_lit.kind == TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit + }; + + if tokens.next().is_some() { + return Err(()); + } + + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. } => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, s), - suffix: None, + kind, + symbol: Symbol::intern(self.interner, lit), + suffix, span: self.call_site, }) } diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 15a9e0deae4..7e9d8057ac9 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -62,11 +62,58 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; + + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 }); + + let lit = if minus_or_lit.kind == TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit + }; + + if tokens.next().is_some() { + return Err(()); + } + + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. 
} => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, s), - suffix: None, + kind, + symbol: Symbol::intern(self.interner, lit), + suffix, span: self.call_site, }) } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 87d832cc76f..e5bfe5ee92c 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -169,8 +169,8 @@ fn test_fn_like_mk_idents() { fn test_fn_like_macro_clone_literals() { assert_expand( "fn_like_clone_tokens", - r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#, - expect![[r#" + r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###, + expect![[r###" SUBTREE $$ 1 1 LITERAL 1u16 1 PUNCH , [alone] 1 @@ -181,8 +181,12 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 1 LITERAL 3.14f32 1 PUNCH , [alone] 1 - LITERAL "hello bridge" 1"#]], - expect![[r#" + LITERAL ""hello bridge"" 1 + PUNCH , [alone] 1 + LITERAL ""suffixed""suffix 1 + PUNCH , [alone] 1 + LITERAL r##"r##"raw"##"## 1"###]], + expect![[r###" SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } @@ -193,7 +197,11 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], + LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], ); } diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index a2c9856a3f7..ab72f1fba09 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -20,10 +20,11 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; use serde::Deserialize; +use toolchain::Tool; use crate::{ cfg_flag::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, - InvocationStrategy, Package, + 
InvocationStrategy, Package, Sysroot, TargetKind, }; #[derive(Debug, Default, Clone, PartialEq, Eq)] @@ -61,6 +62,7 @@ impl WorkspaceBuildScripts { config: &CargoConfig, allowed_features: &FxHashSet, workspace_root: &AbsPathBuf, + sysroot: Option<&Sysroot>, ) -> io::Result { let mut cmd = match config.run_build_script_command.as_deref() { Some([program, args @ ..]) => { @@ -69,7 +71,8 @@ impl WorkspaceBuildScripts { cmd } _ => { - let mut cmd = Command::new(toolchain::cargo()); + let mut cmd = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); cmd.args(&config.extra_args); @@ -133,6 +136,7 @@ impl WorkspaceBuildScripts { workspace: &CargoWorkspace, progress: &dyn Fn(String), toolchain: &Option, + sysroot: Option<&Sysroot>, ) -> io::Result { const RUST_1_62: Version = Version::new(1, 62, 0); @@ -151,6 +155,7 @@ impl WorkspaceBuildScripts { config, &allowed_features, &workspace.workspace_root().to_path_buf(), + sysroot, )?, workspace, current_dir, @@ -165,6 +170,7 @@ impl WorkspaceBuildScripts { config, &allowed_features, &workspace.workspace_root().to_path_buf(), + sysroot, )?; cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; @@ -194,7 +200,7 @@ impl WorkspaceBuildScripts { )) } }; - let cmd = Self::build_command(config, &Default::default(), workspace_root)?; + let cmd = Self::build_command(config, &Default::default(), workspace_root, None)?; // NB: Cargo.toml could have been modified between `cargo metadata` and // `cargo check`. We shouldn't assume that package ids we see here are // exactly those from `config`. @@ -415,6 +421,7 @@ impl WorkspaceBuildScripts { rustc: &CargoWorkspace, current_dir: &AbsPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Self { let mut bs = WorkspaceBuildScripts::default(); for p in rustc.packages() { @@ -422,7 +429,8 @@ impl WorkspaceBuildScripts { } let res = (|| { let target_libdir = (|| { - let mut cargo_config = Command::new(toolchain::cargo()); + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(current_dir) @@ -431,7 +439,8 @@ impl WorkspaceBuildScripts { if let Ok(it) = utf8_stdout(cargo_config) { return Ok(it); } - let mut cmd = Command::new(toolchain::rustc()); + let mut cmd = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "target-libdir"]); utf8_stdout(cmd) @@ -458,7 +467,11 @@ impl WorkspaceBuildScripts { .collect(); for p in rustc.packages() { let package = &rustc[p]; - if package.targets.iter().any(|&it| rustc[it].is_proc_macro) { + if package + .targets + .iter() + .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) + { if let Some((_, path)) = proc_macro_dylibs .iter() .find(|(name, _)| *name.trim_start_matches("lib") == package.name) diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index a99ee6e664c..08d86fd7b0f 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -12,8 +12,9 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize; use serde_json::from_value; +use toolchain::Tool; -use crate::{utf8_stdout, InvocationLocation, 
ManifestPath}; +use crate::{utf8_stdout, InvocationLocation, ManifestPath, Sysroot}; use crate::{CfgOverrides, InvocationStrategy}; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo @@ -188,8 +189,6 @@ pub struct TargetData { pub root: AbsPathBuf, /// Kind of target pub kind: TargetKind, - /// Is this target a proc-macro - pub is_proc_macro: bool, /// Required features of the target without which it won't build pub required_features: Vec, } @@ -198,7 +197,10 @@ pub struct TargetData { pub enum TargetKind { Bin, /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...). - Lib, + Lib { + /// Is this target a proc-macro + is_proc_macro: bool, + }, Example, Test, Bench, @@ -215,8 +217,8 @@ impl TargetKind { "bench" => TargetKind::Bench, "example" => TargetKind::Example, "custom-build" => TargetKind::BuildScript, - "proc-macro" => TargetKind::Lib, - _ if kind.contains("lib") => TargetKind::Lib, + "proc-macro" => TargetKind::Lib { is_proc_macro: true }, + _ if kind.contains("lib") => TargetKind::Lib { is_proc_macro: false }, _ => continue, }; } @@ -236,12 +238,13 @@ impl CargoWorkspace { cargo_toml: &ManifestPath, current_dir: &AbsPath, config: &CargoConfig, + sysroot: Option<&Sysroot>, progress: &dyn Fn(String), ) -> anyhow::Result { - let targets = find_list_of_build_targets(config, cargo_toml); + let targets = find_list_of_build_targets(config, cargo_toml, sysroot); let mut meta = MetadataCommand::new(); - meta.cargo_path(toolchain::cargo()); + meta.cargo_path(Tool::Cargo.path()); meta.manifest_path(cargo_toml.to_path_buf()); match &config.features { CargoFeatures::All => { @@ -289,6 +292,7 @@ impl CargoWorkspace { (|| -> Result { let mut command = meta.cargo_command(); + Sysroot::set_rustup_toolchain_env(&mut command, sysroot); command.envs(&config.extra_env); let output = command.output()?; if !output.status.success() { @@ -368,7 +372,6 @@ impl CargoWorkspace { name, root: AbsPathBuf::assert(src_path.into()), kind: TargetKind::new(&kind), - is_proc_macro: *kind == ["proc-macro"], required_features, }); pkg_data.targets.push(tgt); @@ -476,24 +479,30 @@ impl CargoWorkspace { } } -fn find_list_of_build_targets(config: &CargoConfig, cargo_toml: &ManifestPath) -> Vec { +fn find_list_of_build_targets( + config: &CargoConfig, + cargo_toml: &ManifestPath, + sysroot: Option<&Sysroot>, +) -> Vec { if let Some(target) = &config.target { return [target.into()].to_vec(); } - let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env); + let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env, sysroot); if !build_targets.is_empty() { return build_targets; } - rustc_discover_host_triple(cargo_toml, &config.extra_env).into_iter().collect() + rustc_discover_host_triple(cargo_toml, &config.extra_env, sysroot).into_iter().collect() } fn rustc_discover_host_triple( cargo_toml: &ManifestPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Option { - let mut rustc = Command::new(toolchain::rustc()); + let mut rustc = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut rustc, sysroot); rustc.envs(extra_env); rustc.current_dir(cargo_toml.parent()).arg("-vV"); tracing::debug!("Discovering host platform by {:?}", rustc); @@ -519,8 +528,10 @@ fn rustc_discover_host_triple( fn cargo_config_build_target( cargo_toml: &ManifestPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Vec { - let mut cargo_config = Command::new(toolchain::cargo()); + let mut cargo_config = Command::new(Tool::Cargo.path()); + 
Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index cf3231498f3..fba0aaa8ce9 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -49,7 +49,7 @@ //! user explores them belongs to that extension (it's totally valid to change //! rust-project.json over time via configuration request!) -use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, DependencyKind, Edition}; +use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition}; use la_arena::RawIdx; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; @@ -135,7 +135,6 @@ impl ProjectJson { Dependency::new( dep_data.name, CrateId::from_raw(RawIdx::from(dep_data.krate as u32)), - DependencyKind::Normal, ) }) .collect::>(), diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs index 0aee002fbb3..1ad6e7255bf 100644 --- a/crates/project-model/src/rustc_cfg.rs +++ b/crates/project-model/src/rustc_cfg.rs @@ -8,17 +8,13 @@ use rustc_hash::FxHashMap; use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath, Sysroot}; /// Determines how `rustc --print cfg` is discovered and invoked. -/// -/// There options are supported: -/// - [`RustcCfgConfig::Cargo`], which relies on `cargo rustc --print cfg` -/// and `RUSTC_BOOTSTRAP`. -/// - [`RustcCfgConfig::Explicit`], which uses an explicit path to the `rustc` -/// binary in the sysroot. -/// - [`RustcCfgConfig::Discover`], which uses [`toolchain::rustc`]. pub(crate) enum RustcCfgConfig<'a> { - Cargo(&'a ManifestPath), - Explicit(&'a Sysroot), - Discover, + /// Use `rustc --print cfg`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::rustc`]. + Rustc(Option<&'a Sysroot>), + /// Use `cargo --print cfg`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::cargo`]. 
+ Cargo(Option<&'a Sysroot>, &'a ManifestPath), } pub(crate) fn get( @@ -71,9 +67,10 @@ fn get_rust_cfgs( extra_env: &FxHashMap, config: RustcCfgConfig<'_>, ) -> anyhow::Result { - let mut cmd = match config { - RustcCfgConfig::Cargo(cargo_toml) => { - let mut cmd = Command::new(toolchain::cargo()); + let sysroot = match config { + RustcCfgConfig::Cargo(sysroot, cargo_toml) => { + let mut cmd = Command::new(toolchain::Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) .args(["rustc", "-Z", "unstable-options", "--print", "cfg"]) @@ -82,25 +79,24 @@ fn get_rust_cfgs( cmd.args(["--target", target]); } - return utf8_stdout(cmd).context("Unable to run `cargo rustc`"); - } - RustcCfgConfig::Explicit(sysroot) => { - let rustc: std::path::PathBuf = sysroot.discover_rustc()?.into(); - tracing::debug!(?rustc, "using explicit rustc from sysroot"); - Command::new(rustc) - } - RustcCfgConfig::Discover => { - let rustc = toolchain::rustc(); - tracing::debug!(?rustc, "using rustc from env"); - Command::new(rustc) + match utf8_stdout(cmd) { + Ok(it) => return Ok(it), + Err(e) => { + tracing::warn!("failed to run `cargo rustc --print cfg`, falling back to invoking rustc directly: {e}"); + sysroot + } + } } + RustcCfgConfig::Rustc(sysroot) => sysroot, }; + let mut cmd = Command::new(toolchain::Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "cfg", "-O"]); if let Some(target) = target { cmd.args(["--target", target]); } - utf8_stdout(cmd).context("Unable to run `rustc`") + utf8_stdout(cmd).context("unable to fetch cfgs via `rustc --print cfg -O`") } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 9e19a525838..07cfaba2d2c 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -4,24 +4,38 @@ //! but we can't process `.rlib` and need source code instead. The source code //! is typically installed with `rustup component add rust-src` command. 
-use std::{env, fs, iter, ops, path::PathBuf, process::Command}; +use std::{env, fs, iter, ops, path::PathBuf, process::Command, sync::Arc}; -use anyhow::{format_err, Context, Result}; +use anyhow::{format_err, Result}; use base_db::CrateName; use itertools::Itertools; use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; +use toolchain::probe_for_binary; use crate::{utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath}; -#[derive(Debug, Clone, Eq, PartialEq)] +#[derive(Debug, Clone)] pub struct Sysroot { root: AbsPathBuf, - src_root: AbsPathBuf, + src_root: Option>>, mode: SysrootMode, } +impl Eq for Sysroot {} +impl PartialEq for Sysroot { + fn eq(&self, other: &Self) -> bool { + self.root == other.root + && self.mode == other.mode + && match (&self.src_root, &other.src_root) { + (Some(Ok(this)), Some(Ok(other))) => this == other, + (None, None) | (Some(Err(_)), Some(Err(_))) => true, + _ => false, + } + } +} + #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum SysrootMode { Workspace(CargoWorkspace), @@ -86,8 +100,8 @@ impl Sysroot { /// Returns the sysroot "source" directory, where stdlib sources are located, like: /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` - pub fn src_root(&self) -> &AbsPath { - &self.src_root + pub fn src_root(&self) -> Option<&AbsPath> { + self.src_root.as_ref()?.as_deref().ok() } pub fn is_empty(&self) -> bool { @@ -98,6 +112,11 @@ impl Sysroot { } pub fn loading_warning(&self) -> Option { + let src_root = match &self.src_root { + None => return Some(format!("sysroot at `{}` has no library sources", self.root)), + Some(Ok(src_root)) => src_root, + Some(Err(e)) => return Some(e.to_string()), + }; let has_core = match &self.mode { SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"), SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(), @@ -108,10 +127,7 @@ impl Sysroot { } else { " try running `rustup component add rust-src` to possible fix this" }; - Some(format!( - "could not find libcore in loaded sysroot at `{}`{var_note}", - self.src_root.as_path(), - )) + Some(format!("could not find libcore in loaded sysroot at `{}`{var_note}", src_root,)) } else { None } @@ -140,8 +156,19 @@ impl Sysroot { tracing::debug!("discovering sysroot for {dir}"); let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_src_dir = - discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; - Ok(Sysroot::load(sysroot_dir, sysroot_src_dir, metadata)) + discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata)) + } + + pub fn discover_no_source( + dir: &AbsPath, + extra_env: &FxHashMap, + ) -> Result { + tracing::debug!("discovering sysroot for {dir}"); + let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; + let sysroot_src_dir = + discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), false)) } pub fn discover_with_src_override( @@ -152,33 +179,59 @@ impl Sysroot { ) -> Result { tracing::debug!("discovering sysroot for {current_dir}"); let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?; - Ok(Sysroot::load(sysroot_dir, src, metadata)) + Ok(Sysroot::load(sysroot_dir, Some(Ok(src)), metadata)) } pub fn discover_rustc_src(&self) -> Option { get_rustc_src(&self.root) } - pub fn discover_rustc(&self) -> anyhow::Result { - let rustc = 
self.root.join("bin/rustc"); - tracing::debug!(?rustc, "checking for rustc binary at location"); - match fs::metadata(&rustc) { - Ok(_) => Ok(rustc), - Err(e) => Err(e).context(format!( - "failed to discover rustc in sysroot: {:?}", - AsRef::::as_ref(&self.root) - )), - } - } - pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Result { let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| { format_err!("can't load standard library from sysroot path {sysroot_dir}") - })?; - Ok(Sysroot::load(sysroot_dir, sysroot_src_dir, metadata)) + }); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata)) } - pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf, metadata: bool) -> Sysroot { + pub fn set_rustup_toolchain_env(cmd: &mut Command, sysroot: Option<&Self>) { + if let Some(sysroot) = sysroot { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(&sysroot.root)); + } + } + + pub fn discover_proc_macro_srv(&self) -> anyhow::Result { + ["libexec", "lib"] + .into_iter() + .map(|segment| self.root().join(segment).join("rust-analyzer-proc-macro-srv")) + .find_map(|server_path| probe_for_binary(server_path.into())) + .map(AbsPathBuf::assert) + .ok_or_else(|| { + anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", self.root()) + }) + } + + pub fn load( + sysroot_dir: AbsPathBuf, + sysroot_src_dir: Option>, + metadata: bool, + ) -> Sysroot { + let sysroot_src_dir = match sysroot_src_dir { + Some(Ok(sysroot_src_dir)) => sysroot_src_dir, + Some(Err(e)) => { + return Sysroot { + root: sysroot_dir, + src_root: Some(Err(Arc::new(e))), + mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }), + } + } + None => { + return Sysroot { + root: sysroot_dir, + src_root: None, + mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }), + } + } + }; if metadata { let sysroot: Option<_> = (|| { let sysroot_cargo_toml = ManifestPath::try_from( @@ -187,10 +240,19 @@ impl Sysroot { .ok()?; let current_dir = AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot")).ok()?; + + let mut cargo_config = CargoConfig::default(); + // the sysroot uses `public-dependency`, so we make cargo think it's a nightly + cargo_config.extra_env.insert( + "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(), + "nightly".to_owned(), + ); + let res = CargoWorkspace::fetch_metadata( &sysroot_cargo_toml, ¤t_dir, - &CargoConfig::default(), + &cargo_config, + None, &|_| (), ) .map_err(|e| { @@ -274,7 +336,7 @@ impl Sysroot { let cargo_workspace = CargoWorkspace::new(res); Some(Sysroot { root: sysroot_dir.clone(), - src_root: sysroot_src_dir.clone(), + src_root: Some(Ok(sysroot_src_dir.clone())), mode: SysrootMode::Workspace(cargo_workspace), }) })(); @@ -326,7 +388,7 @@ impl Sysroot { } Sysroot { root: sysroot_dir, - src_root: sysroot_src_dir, + src_root: Some(Ok(sysroot_src_dir)), mode: SysrootMode::Stitched(stitched), } } diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index cb995857ec7..af635dda578 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -3,38 +3,58 @@ use std::process::Command; use rustc_hash::FxHashMap; -use crate::{utf8_stdout, ManifestPath}; +use crate::{utf8_stdout, ManifestPath, Sysroot}; + +/// Determines how `rustc --print target-spec-json` is discovered and invoked. 
+pub enum RustcDataLayoutConfig<'a> { + /// Use `rustc --print target-spec-json`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::rustc`]. + Rustc(Option<&'a Sysroot>), + /// Use `cargo --print target-spec-json`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::cargo`]. + Cargo(Option<&'a Sysroot>, &'a ManifestPath), +} pub fn get( - cargo_toml: Option<&ManifestPath>, + config: RustcDataLayoutConfig<'_>, target: Option<&str>, extra_env: &FxHashMap, ) -> anyhow::Result { - let output = (|| { - if let Some(cargo_toml) = cargo_toml { - let mut cmd = Command::new(toolchain::rustc()); + let process = |output: String| { + (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() + .ok_or_else(|| { + anyhow::format_err!("could not fetch target-spec-json from command output") + }) + }; + let sysroot = match config { + RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => { + let mut cmd = Command::new(toolchain::Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) - .args(["-Z", "unstable-options", "--print", "target-spec-json"]) + .args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { cmd.args(["--target", target]); } match utf8_stdout(cmd) { - Ok(it) => return Ok(it), - Err(e) => tracing::debug!("{e:?}: falling back to querying rustc for cfgs"), + Ok(output) => return process(output), + Err(e) => { + tracing::warn!("failed to run `cargo rustc --print target-spec-json`, falling back to invoking rustc directly: {e}"); + sysroot + } } } - // using unstable cargo features failed, fall back to using plain rustc - let mut cmd = Command::new(toolchain::rustc()); - cmd.envs(extra_env) - .args(["-Z", "unstable-options", "--print", "target-spec-json"]) - .env("RUSTC_BOOTSTRAP", "1"); - if let Some(target) = target { - cmd.args(["--target", target]); - } - utf8_stdout(cmd) - })()?; - (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() - .ok_or_else(|| anyhow::format_err!("could not fetch target-spec-json from command output")) + RustcDataLayoutConfig::Rustc(sysroot) => sysroot, + }; + + let mut cmd = Command::new(toolchain::Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); + cmd.envs(extra_env) + .args(["-Z", "unstable-options", "--print", "target-spec-json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(target) = target { + cmd.args(["--target", target]); + } + process(utf8_stdout(cmd)?) 
} diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 74042e925ed..b9b1b701f6d 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -9,6 +9,7 @@ use expect_test::{expect_file, ExpectFile}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use serde::de::DeserializeOwned; +use triomphe::Arc; use crate::{ CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, @@ -34,6 +35,7 @@ fn load_cargo_with_overrides( cfg_overrides, toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; to_crate_graph(project_workspace) } @@ -53,6 +55,7 @@ fn load_cargo_with_fake_sysroot( cfg_overrides: Default::default(), toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; project_workspace.to_crate_graph( &mut { @@ -69,8 +72,13 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) { let data = get_test_json_file(file); let project = rooted_project_json(data); let sysroot = Ok(get_fake_sysroot()); - let project_workspace = - ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new(), toolchain: None }; + let project_workspace = ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg: Vec::new(), + toolchain: None, + target_layout: Err(Arc::from("test has no data layout")), + }; to_crate_graph(project_workspace) } @@ -125,7 +133,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the same path: let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(sysroot_dir, sysroot_src_dir, false) + Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { @@ -230,7 +238,7 @@ fn crate_graph_dedup_identical() { let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone()); - crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_| ()); + crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |(_, a), (_, b)| a == b); assert!(crate_graph.iter().eq(d_crate_graph.iter())); assert_eq!(proc_macros, d_proc_macros); } @@ -246,62 +254,10 @@ fn crate_graph_dedup() { load_cargo_with_fake_sysroot(path_map, "regex-metadata.json"); assert_eq!(regex_crate_graph.iter().count(), 60); - crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_| ()); + crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b); assert_eq!(crate_graph.iter().count(), 118); } -#[test] -fn test_deduplicate_origin_dev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - crate_graph.sort_deps(); - let (crate_graph_1, mut _proc_macros_2) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ()); - - let mut crates_named_p2 = vec![]; - for id in crate_graph.iter() { - let krate = &crate_graph[id]; - if let Some(name) = krate.display_name.as_ref() { - if name.to_string() == "p2" { - crates_named_p2.push(krate); - } - } - } - - assert!(crates_named_p2.len() == 1); - let p2 = crates_named_p2[0]; - assert!(p2.origin.is_local()); -} - -#[test] -fn test_deduplicate_origin_dev_rev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - 
load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - crate_graph.sort_deps(); - let (crate_graph_1, mut _proc_macros_2) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ()); - - let mut crates_named_p2 = vec![]; - for id in crate_graph.iter() { - let krate = &crate_graph[id]; - if let Some(name) = krate.display_name.as_ref() { - if name.to_string() == "p2" { - crates_named_p2.push(krate); - } - } - } - - assert!(crates_named_p2.len() == 1); - let p2 = crates_named_p2[0]; - assert!(p2.origin.is_local()); -} - #[test] fn smoke_test_real_sysroot_cargo() { if std::env::var("SYSROOT_CARGO_METADATA").is_err() { @@ -327,6 +283,7 @@ fn smoke_test_real_sysroot_cargo() { cfg_overrides: Default::default(), toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; project_workspace.to_crate_graph( &mut { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index cda5ad2f110..b7ae76be8ce 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -6,14 +6,15 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr, use anyhow::{format_err, Context}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, + FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgAtom, CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; use stdx::always; +use toolchain::Tool; use triomphe::Arc; use crate::{ @@ -23,8 +24,9 @@ use crate::{ project_json::Crate, rustc_cfg::{self, RustcCfgConfig}, sysroot::{SysrootCrate, SysrootMode}, - target_data_layout, utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, - Package, ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, + target_data_layout::{self, RustcDataLayoutConfig}, + utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, + ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, }; /// A set of cfg-overrides per crate. @@ -69,7 +71,8 @@ pub enum ProjectWorkspace { rustc_cfg: Vec, cfg_overrides: CfgOverrides, toolchain: Option, - target_layout: Result, + target_layout: TargetLayoutLoadResult, + cargo_config_extra_env: FxHashMap, }, /// Project workspace was manually specified using a `rust-project.json` file. Json { @@ -79,6 +82,7 @@ pub enum ProjectWorkspace { /// `rustc --print cfg`. rustc_cfg: Vec, toolchain: Option, + target_layout: TargetLayoutLoadResult, }, // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning. // That's not the end user experience we should strive for. 
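[Editor's aside on the workspace changes above: the patch threads an optional `Sysroot` through every external `cargo`/`rustc` invocation, pins the call to that toolchain via the `RUSTUP_TOOLCHAIN` environment variable, and then strips a "cargo "/"rustc " prefix from `--version` output to recover the toolchain version. The standalone sketch below only illustrates that pattern; `query_cargo_version` and the plain-string `toolchain_root` parameter are assumptions made for the example and are not APIs from this patch.]

    use std::process::Command;

    /// Illustrative sketch (not rust-analyzer code): run `cargo --version`,
    /// optionally pinned to an explicit toolchain root in the same spirit as
    /// `Sysroot::set_rustup_toolchain_env`, and parse out the version number.
    fn query_cargo_version(toolchain_root: Option<&str>) -> Option<String> {
        let mut cmd = Command::new("cargo");
        if let Some(root) = toolchain_root {
            // rustup honors RUSTUP_TOOLCHAIN, so the proxy picks this toolchain.
            cmd.env("RUSTUP_TOOLCHAIN", root);
        }
        let output = cmd.arg("--version").output().ok()?;
        let stdout = String::from_utf8(output.stdout).ok()?;
        // "cargo 1.76.0 (c84b36747 2024-01-18)" -> "1.76.0"
        stdout.strip_prefix("cargo ")?.split_whitespace().next().map(ToOwned::to_owned)
    }

    fn main() {
        // None -> use whichever `cargo` is first on PATH; Some(root) -> pin a toolchain.
        println!("{:?}", query_cargo_version(None));
    }

[End of aside; the diff continues unchanged below.]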
@@ -111,7 +115,8 @@ impl fmt::Debug for ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, - target_layout: data_layout, + target_layout, + cargo_config_extra_env, } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) @@ -124,16 +129,25 @@ impl fmt::Debug for ProjectWorkspace { .field("n_rustc_cfg", &rustc_cfg.len()) .field("n_cfg_overrides", &cfg_overrides.len()) .field("toolchain", &toolchain) - .field("data_layout", &data_layout) + .field("data_layout", &target_layout) + .field("cargo_config_extra_env", &cargo_config_extra_env) .finish(), - ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => { + ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg, + toolchain, + target_layout: data_layout, + } => { let mut debug_struct = f.debug_struct("Json"); debug_struct.field("n_crates", &project.n_crates()); if let Ok(sysroot) = sysroot { debug_struct.field("n_sysroot_crates", &sysroot.num_packages()); } - debug_struct.field("toolchain", &toolchain); - debug_struct.field("n_rustc_cfg", &rustc_cfg.len()); + debug_struct + .field("toolchain", &toolchain) + .field("n_rustc_cfg", &rustc_cfg.len()) + .field("data_layout", &data_layout); debug_struct.finish() } ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f @@ -146,6 +160,28 @@ impl fmt::Debug for ProjectWorkspace { } } +fn get_toolchain_version( + current_dir: &AbsPath, + sysroot: Option<&Sysroot>, + tool: Tool, + extra_env: &FxHashMap, + prefix: &str, +) -> Result, anyhow::Error> { + let cargo_version = utf8_stdout({ + let mut cmd = Command::new(tool.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); + cmd.envs(extra_env); + cmd.arg("--version").current_dir(current_dir); + cmd + }) + .with_context(|| format!("Failed to query rust toolchain version at {current_dir}, is your toolchain setup correctly?"))?; + anyhow::Ok( + cargo_version + .get(prefix.len()..) + .and_then(|it| Version::parse(it.split_whitespace().next()?).ok()), + ) +} + impl ProjectWorkspace { pub fn load( manifest: ProjectManifest, @@ -161,20 +197,6 @@ impl ProjectWorkspace { config: &CargoConfig, progress: &dyn Fn(String), ) -> anyhow::Result { - let version = |current_dir, cmd_path, prefix: &str| { - let cargo_version = utf8_stdout({ - let mut cmd = Command::new(cmd_path); - cmd.envs(&config.extra_env); - cmd.arg("--version").current_dir(current_dir); - cmd - }) - .with_context(|| format!("Failed to query rust toolchain version at {current_dir}, is your toolchain setup correctly?"))?; - anyhow::Ok( - cargo_version - .get(prefix.len()..) 
- .and_then(|it| Version::parse(it.split_whitespace().next()?).ok()), - ) - }; let res = match manifest { ProjectManifest::ProjectJson(project_json) => { let file = fs::read_to_string(project_json) @@ -182,30 +204,14 @@ impl ProjectWorkspace { let data = serde_json::from_str(&file) .with_context(|| format!("Failed to deserialize json file {project_json}"))?; let project_location = project_json.parent().to_path_buf(); - let toolchain = version(&*project_location, toolchain::rustc(), "rustc ")?; - let project_json = ProjectJson::new(&project_location, data); + let project_json: ProjectJson = ProjectJson::new(&project_location, data); ProjectWorkspace::load_inline( project_json, config.target.as_deref(), &config.extra_env, - toolchain, ) } ProjectManifest::CargoToml(cargo_toml) => { - let toolchain = version(cargo_toml.parent(), toolchain::cargo(), "cargo ")?; - let meta = CargoWorkspace::fetch_metadata( - cargo_toml, - cargo_toml.parent(), - config, - progress, - ) - .with_context(|| { - format!( - "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", - ) - })?; - let cargo = CargoWorkspace::new(meta); - let sysroot = match (&config.sysroot, &config.sysroot_src) { (Some(RustLibSource::Path(path)), None) => { Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata).map_err(|e| { @@ -218,7 +224,7 @@ impl ProjectWorkspace { }) } (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { - Ok(Sysroot::load(sysroot.clone(), sysroot_src.clone(), config.sysroot_query_metadata)) + Ok(Sysroot::load(sysroot.clone(), Some(Ok(sysroot_src.clone())), config.sysroot_query_metadata)) } (Some(RustLibSource::Discover), Some(sysroot_src)) => { Sysroot::discover_with_src_override( @@ -231,18 +237,19 @@ impl ProjectWorkspace { } (None, _) => Err(None), }; + let sysroot_ref = sysroot.as_ref().ok(); if let Ok(sysroot) = &sysroot { - tracing::info!(workspace = %cargo_toml, src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); + tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); } let rustc_dir = match &config.rustc_source { Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), Some(RustLibSource::Discover) => { - sysroot.as_ref().ok().and_then(Sysroot::discover_rustc_src).ok_or_else( - || Some("Failed to discover rustc source for sysroot.".to_owned()), - ) + sysroot_ref.and_then(Sysroot::discover_rustc_src).ok_or_else(|| { + Some("Failed to discover rustc source for sysroot.".to_owned()) + }) } None => Err(None), }; @@ -256,6 +263,7 @@ impl ProjectWorkspace { features: crate::CargoFeatures::default(), ..config.clone() }, + sysroot_ref, progress, ) { Ok(meta) => { @@ -264,6 +272,7 @@ impl ProjectWorkspace { &workspace, cargo_toml.parent(), &config.extra_env, + sysroot_ref ); Ok(Box::new((workspace, buildscripts))) } @@ -279,21 +288,45 @@ impl ProjectWorkspace { } }); + let toolchain = get_toolchain_version( + cargo_toml.parent(), + sysroot_ref, + toolchain::Tool::Cargo, + &config.extra_env, + "cargo ", + )?; let rustc_cfg = rustc_cfg::get( config.target.as_deref(), &config.extra_env, - RustcCfgConfig::Cargo(cargo_toml), + RustcCfgConfig::Cargo(sysroot_ref, cargo_toml), ); let cfg_overrides = config.cfg_overrides.clone(); let data_layout = target_data_layout::get( - Some(cargo_toml), + RustcDataLayoutConfig::Cargo(sysroot_ref, cargo_toml), config.target.as_deref(), &config.extra_env, ); if let Err(e) = 
&data_layout { tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace"); } + + let meta = CargoWorkspace::fetch_metadata( + cargo_toml, + cargo_toml.parent(), + config, + sysroot_ref, + progress, + ) + .with_context(|| { + format!( + "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", + ) + })?; + let cargo = CargoWorkspace::new(meta); + + let cargo_config_extra_env = + cargo_config_env(cargo_toml, &config.extra_env, sysroot_ref); ProjectWorkspace::Cargo { cargo, build_scripts: WorkspaceBuildScripts::default(), @@ -302,7 +335,10 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, - target_layout: data_layout.map_err(|it| it.to_string()), + target_layout: data_layout + .map(Arc::from) + .map_err(|it| Arc::from(it.to_string())), + cargo_config_extra_env, } } }; @@ -314,15 +350,16 @@ impl ProjectWorkspace { project_json: ProjectJson, target: Option<&str>, extra_env: &FxHashMap, - toolchain: Option, ) -> ProjectWorkspace { let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) { - (Some(sysroot), Some(sysroot_src)) => Ok(Sysroot::load(sysroot, sysroot_src, false)), + (Some(sysroot), Some(sysroot_src)) => { + Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false)) + } (Some(sysroot), None) => { // assume sysroot is structured like rustup's and guess `sysroot_src` let sysroot_src = sysroot.join("lib").join("rustlib").join("src").join("rust").join("library"); - Ok(Sysroot::load(sysroot, sysroot_src, false)) + Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false)) } (None, Some(sysroot_src)) => { // assume sysroot is structured like rustup's and guess `sysroot` @@ -330,23 +367,36 @@ impl ProjectWorkspace { for _ in 0..5 { sysroot.pop(); } - Ok(Sysroot::load(sysroot, sysroot_src, false)) + Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false)) } (None, None) => Err(None), }; - let config = match &sysroot { - Ok(sysroot) => { - tracing::debug!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); - RustcCfgConfig::Explicit(sysroot) - } - Err(_) => { - tracing::debug!("discovering sysroot"); - RustcCfgConfig::Discover + let sysroot_ref = sysroot.as_ref().ok(); + let cfg_config = RustcCfgConfig::Rustc(sysroot_ref); + let data_layout_config = RustcDataLayoutConfig::Rustc(sysroot_ref); + let toolchain = match get_toolchain_version( + project_json.path(), + sysroot_ref, + toolchain::Tool::Rustc, + extra_env, + "rustc ", + ) { + Ok(it) => it, + Err(e) => { + tracing::error!("{e}"); + None } }; - let rustc_cfg = rustc_cfg::get(target, extra_env, config); - ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg, toolchain } + let rustc_cfg = rustc_cfg::get(target, extra_env, cfg_config); + let data_layout = target_data_layout::get(data_layout_config, target, extra_env); + ProjectWorkspace::Json { + project: project_json, + sysroot, + rustc_cfg, + toolchain, + target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + } } pub fn load_detached_files( @@ -373,18 +423,11 @@ impl ProjectWorkspace { } None => Err(None), }; - let rustc_config = match &sysroot { - Ok(sysroot) => { - tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); - RustcCfgConfig::Explicit(sysroot) - } - Err(_) => { - tracing::info!("discovering sysroot"); - RustcCfgConfig::Discover - } - }; - - let rustc_cfg = rustc_cfg::get(None, &FxHashMap::default(), rustc_config); + let rustc_cfg = rustc_cfg::get( + None, + &FxHashMap::default(), + 
RustcCfgConfig::Rustc(sysroot.as_ref().ok()), + ); Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) } @@ -395,11 +438,17 @@ impl ProjectWorkspace { progress: &dyn Fn(String), ) -> anyhow::Result { match self { - ProjectWorkspace::Cargo { cargo, toolchain, .. } => { - WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain) - .with_context(|| { - format!("Failed to run build scripts for {}", cargo.workspace_root()) - }) + ProjectWorkspace::Cargo { cargo, toolchain, sysroot, .. } => { + WorkspaceBuildScripts::run_for_workspace( + config, + cargo, + progress, + toolchain, + sysroot.as_ref().ok(), + ) + .with_context(|| { + format!("Failed to run build scripts for {}", cargo.workspace_root()) + }) } ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => { Ok(WorkspaceBuildScripts::default()) @@ -472,18 +521,7 @@ impl ProjectWorkspace { ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. } | ProjectWorkspace::Json { sysroot: Ok(sysroot), .. } | ProjectWorkspace::DetachedFiles { sysroot: Ok(sysroot), .. } => { - let standalone_server_name = - format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); - ["libexec", "lib"] - .into_iter() - .map(|segment| sysroot.root().join(segment).join(&standalone_server_name)) - .find(|server_path| std::fs::metadata(server_path).is_ok()) - .ok_or_else(|| { - anyhow::format_err!( - "cannot find proc-macro server in sysroot `{}`", - sysroot.root() - ) - }) + sysroot.discover_proc_macro_srv() } ProjectWorkspace::DetachedFiles { .. } => { Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found")) @@ -503,8 +541,7 @@ impl ProjectWorkspace { /// The return type contains the path and whether or not /// the root is a member of the current workspace pub fn to_roots(&self) -> Vec { - let mk_sysroot = |sysroot: Result<_, _>, project_root: Option<&AbsPath>| { - let project_root = project_root.map(ToOwned::to_owned); + let mk_sysroot = |sysroot: Result<_, _>| { sysroot.into_iter().flat_map(move |sysroot: &Sysroot| { let mut r = match sysroot.mode() { SysrootMode::Workspace(ws) => ws @@ -532,18 +569,21 @@ impl ProjectWorkspace { }; r.push(PackageRoot { - // mark the sysroot as mutable if it is located inside of the project - is_local: project_root - .as_ref() - .map_or(false, |project_root| sysroot.src_root().starts_with(project_root)), - include: vec![sysroot.src_root().to_path_buf()], + is_local: false, + include: sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(), exclude: Vec::new(), }); r }) }; match self { - ProjectWorkspace::Json { project, sysroot, rustc_cfg: _, toolchain: _ } => project + ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg: _, + toolchain: _, + target_layout: _, + } => project .crates() .map(|(_, krate)| PackageRoot { is_local: krate.is_workspace_member, @@ -552,7 +592,7 @@ impl ProjectWorkspace { }) .collect::>() .into_iter() - .chain(mk_sysroot(sysroot.as_ref(), Some(project.path()))) + .chain(mk_sysroot(sysroot.as_ref())) .collect::>(), ProjectWorkspace::Cargo { cargo, @@ -563,6 +603,7 @@ impl ProjectWorkspace { build_scripts, toolchain: _, target_layout: _, + cargo_config_extra_env: _, } => { cargo .packages() @@ -586,7 +627,7 @@ impl ProjectWorkspace { let extra_targets = cargo[pkg] .targets .iter() - .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib) + .filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { .. 
})) .filter_map(|&tgt| cargo[tgt].root.parent()) .map(|tgt| tgt.normalize().to_path_buf()) .filter(|path| !path.starts_with(&pkg_root)); @@ -602,7 +643,7 @@ impl ProjectWorkspace { } PackageRoot { is_local, include, exclude } }) - .chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root()))) + .chain(mk_sysroot(sysroot.as_ref())) .chain(rustc.iter().map(|a| a.as_ref()).flat_map(|(rustc, _)| { rustc.packages().map(move |krate| PackageRoot { is_local: false, @@ -619,7 +660,7 @@ impl ProjectWorkspace { include: vec![detached_file.clone()], exclude: Vec::new(), }) - .chain(mk_sysroot(sysroot.as_ref(), None)) + .chain(mk_sysroot(sysroot.as_ref())) .collect(), } } @@ -651,17 +692,19 @@ impl ProjectWorkspace { let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered(); let (mut crate_graph, proc_macros) = match self { - ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => { - project_json_to_crate_graph( - rustc_cfg.clone(), - load, - project, - sysroot.as_ref().ok(), - extra_env, - Err("rust-project.json projects have no target layout set".into()), - toolchain.clone(), - ) - } + ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg, + toolchain: _, + target_layout: _, + } => project_json_to_crate_graph( + rustc_cfg.clone(), + load, + project, + sysroot.as_ref().ok(), + extra_env, + ), ProjectWorkspace::Cargo { cargo, sysroot, @@ -669,8 +712,9 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, build_scripts, - toolchain, - target_layout, + toolchain: _, + target_layout: _, + cargo_config_extra_env: _, } => cargo_to_crate_graph( load, rustc.as_ref().map(|a| a.as_ref()).ok(), @@ -679,20 +723,9 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, - match target_layout.as_ref() { - Ok(it) => Ok(Arc::from(it.as_str())), - Err(it) => Err(Arc::from(it.as_str())), - }, - toolchain.as_ref(), ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { - detached_files_to_crate_graph( - rustc_cfg.clone(), - load, - files, - sysroot.as_ref().ok(), - Err("detached file projects have no target layout set".into()), - ) + detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok()) } }; if crate_graph.patch_cfg_if() { @@ -713,6 +746,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, + cargo_config_extra_env, build_scripts: _, target_layout: _, }, @@ -723,6 +757,7 @@ impl ProjectWorkspace { rustc_cfg: o_rustc_cfg, cfg_overrides: o_cfg_overrides, toolchain: o_toolchain, + cargo_config_extra_env: o_cargo_config_extra_env, build_scripts: _, target_layout: _, }, @@ -733,14 +768,16 @@ impl ProjectWorkspace { && cfg_overrides == o_cfg_overrides && toolchain == o_toolchain && sysroot == o_sysroot + && cargo_config_extra_env == o_cargo_config_extra_env } ( - Self::Json { project, sysroot, rustc_cfg, toolchain }, + Self::Json { project, sysroot, rustc_cfg, toolchain, target_layout: _ }, Self::Json { project: o_project, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg, toolchain: o_toolchain, + target_layout: _, }, ) => { project == o_project @@ -771,21 +808,12 @@ fn project_json_to_crate_graph( project: &ProjectJson, sysroot: Option<&Sysroot>, extra_env: &FxHashMap, - target_layout: TargetLayoutLoadResult, - toolchain: Option, ) -> (CrateGraph, ProcMacroPaths) { let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let (crate_graph, proc_macros) = &mut res; - let sysroot_deps = sysroot.as_ref().map(|sysroot| { - sysroot_to_crate_graph( - crate_graph, - sysroot, - rustc_cfg.clone(), - 
target_layout.clone(), - load, - toolchain.as_ref(), - ) - }); + let sysroot_deps = sysroot + .as_ref() + .map(|sysroot| sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load)); let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned()); let mut cfg_cache: FxHashMap<&str, Vec> = FxHashMap::default(); @@ -813,12 +841,7 @@ fn project_json_to_crate_graph( let target_cfgs = match target.as_deref() { Some(target) => cfg_cache.entry(target).or_insert_with(|| { - let rustc_cfg = match sysroot { - Some(sysroot) => RustcCfgConfig::Explicit(sysroot), - None => RustcCfgConfig::Discover, - }; - - rustc_cfg::get(Some(target), extra_env, rustc_cfg) + rustc_cfg::get(Some(target), extra_env, RustcCfgConfig::Rustc(sysroot)) }), None => &rustc_cfg, }; @@ -845,8 +868,6 @@ fn project_json_to_crate_graph( } else { CrateOrigin::Local { repo: None, name: None } }, - target_layout.clone(), - toolchain.clone(), ); if *is_proc_macro { if let Some(path) = proc_macro_dylib_path.clone() { @@ -873,7 +894,7 @@ fn project_json_to_crate_graph( for dep in &krate.deps { if let Some(&to) = crates.get(&dep.crate_id) { - add_dep(crate_graph, from, dep.name.clone(), to, dep.kind().to_owned()) + add_dep(crate_graph, from, dep.name.clone(), to) } } } @@ -889,22 +910,13 @@ fn cargo_to_crate_graph( rustc_cfg: Vec, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: TargetLayoutLoadResult, - toolchain: Option<&Version>, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered(); let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let crate_graph = &mut res.0; let proc_macros = &mut res.1; let (public_deps, libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph( - crate_graph, - sysroot, - rustc_cfg.clone(), - target_layout.clone(), - load, - toolchain, - ), + Some(sysroot) => sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load), None => (SysrootPublicDeps::default(), None), }; @@ -926,8 +938,6 @@ fn cargo_to_crate_graph( // Add test cfg for local crates if cargo[pkg].is_local { cfg_options.insert_atom("test".into()); - } - if cargo[pkg].is_member { cfg_options.insert_atom("rust_analyzer".into()); } @@ -949,7 +959,7 @@ fn cargo_to_crate_graph( let mut lib_tgt = None; for &tgt in cargo[pkg].targets.iter() { - if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member { + if !matches!(cargo[tgt].kind, TargetKind::Lib { .. }) && !cargo[pkg].is_member { // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't // add any targets except the library target, since those will not work correctly if // they use dev-dependencies. @@ -957,46 +967,46 @@ fn cargo_to_crate_graph( // https://github.com/rust-lang/rust-analyzer/issues/11300 continue; } - let &TargetData { ref name, kind, is_proc_macro, ref root, .. } = &cargo[tgt]; - - if kind == TargetKind::Lib - && sysroot.map_or(false, |sysroot| root.starts_with(sysroot.src_root())) - { - if let Some(&(_, crate_id, _)) = - public_deps.deps.iter().find(|(dep_name, ..)| dep_name.as_smol_str() == name) - { - pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, kind)); - - lib_tgt = Some((crate_id, name.clone())); - pkg_to_lib_crate.insert(pkg, crate_id); - // sysroot is inside the workspace, prevent the sysroot crates from being duplicated here - continue; - } - } + let &TargetData { ref name, kind, ref root, .. 
} = &cargo[tgt]; let Some(file_id) = load(root) else { continue }; + let build_data = build_scripts.get_output(pkg); + let pkg_data = &cargo[pkg]; let crate_id = add_target_crate_root( crate_graph, proc_macros, - &cargo[pkg], - build_scripts.get_output(pkg), + pkg_data, + build_data, cfg_options.clone(), file_id, name, - is_proc_macro, - target_layout.clone(), - false, - toolchain.cloned(), + kind, + if pkg_data.is_local { + CrateOrigin::Local { + repo: pkg_data.repository.clone(), + name: Some(pkg_data.name.clone()), + } + } else { + CrateOrigin::Library { + repo: pkg_data.repository.clone(), + name: pkg_data.name.clone(), + } + }, ); - if kind == TargetKind::Lib { + if let TargetKind::Lib { .. } = kind { lib_tgt = Some((crate_id, name.clone())); pkg_to_lib_crate.insert(pkg, crate_id); } // Even crates that don't set proc-macro = true are allowed to depend on proc_macro // (just none of the APIs work when called outside of a proc macro). if let Some(proc_macro) = libproc_macro { - add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro); + add_proc_macro_dep( + crate_graph, + crate_id, + proc_macro, + matches!(kind, TargetKind::Lib { is_proc_macro: true }), + ); } pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, kind)); @@ -1016,7 +1026,7 @@ fn cargo_to_crate_graph( // cargo metadata does not do any normalization, // so we do it ourselves currently let name = CrateName::normalize_dashes(&name); - add_dep(crate_graph, from, name, to, DependencyKind::Normal); + add_dep(crate_graph, from, name, to); } } } @@ -1036,17 +1046,7 @@ fn cargo_to_crate_graph( continue; } - add_dep( - crate_graph, - from, - name.clone(), - to, - match dep.kind { - DepKind::Normal => DependencyKind::Normal, - DepKind::Dev => DependencyKind::Dev, - DepKind::Build => DependencyKind::Build, - }, - ) + add_dep(crate_graph, from, name.clone(), to) } } } @@ -1074,8 +1074,6 @@ fn cargo_to_crate_graph( } else { rustc_build_scripts }, - target_layout, - toolchain, ); } } @@ -1087,19 +1085,11 @@ fn detached_files_to_crate_graph( load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], sysroot: Option<&Sysroot>, - target_layout: TargetLayoutLoadResult, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered(); let mut crate_graph = CrateGraph::default(); let (public_deps, _libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph( - &mut crate_graph, - sysroot, - rustc_cfg.clone(), - target_layout.clone(), - load, - None, - ), + Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load), None => (SysrootPublicDeps::default(), None), }; @@ -1131,8 +1121,6 @@ fn detached_files_to_crate_graph( repo: None, name: display_name.map(|n| n.canonical_name().to_owned()), }, - target_layout.clone(), - None, ); public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate); @@ -1153,8 +1141,6 @@ fn handle_rustc_crates( cfg_options: &CfgOptions, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: TargetLayoutLoadResult, - toolchain: Option<&Version>, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -1194,9 +1180,9 @@ fn handle_rustc_crates( }; for &tgt in rustc_workspace[pkg].targets.iter() { - if rustc_workspace[tgt].kind != TargetKind::Lib { + let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else { continue; - } + }; if let 
Some(file_id) = load(&rustc_workspace[tgt].root) { let crate_id = add_target_crate_root( crate_graph, @@ -1206,21 +1192,14 @@ fn handle_rustc_crates( cfg_options.clone(), file_id, &rustc_workspace[tgt].name, - rustc_workspace[tgt].is_proc_macro, - target_layout.clone(), - true, - toolchain.cloned(), + kind, + CrateOrigin::Rustc { name: rustc_workspace[pkg].name.clone() }, ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate public_deps.add_to_crate_graph(crate_graph, crate_id); if let Some(proc_macro) = libproc_macro { - add_proc_macro_dep( - crate_graph, - crate_id, - proc_macro, - rustc_workspace[tgt].is_proc_macro, - ); + add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro); } rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); } @@ -1234,17 +1213,7 @@ fn handle_rustc_crates( let name = CrateName::new(&dep.name).unwrap(); if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() { - add_dep( - crate_graph, - from, - name.clone(), - to, - match dep.kind { - DepKind::Normal => DependencyKind::Normal, - DepKind::Dev => DependencyKind::Dev, - DepKind::Build => DependencyKind::Build, - }, - ); + add_dep(crate_graph, from, name.clone(), to); } } } @@ -1266,7 +1235,7 @@ fn handle_rustc_crates( // `rust_analyzer` thinks that it should use the one from the `rustc_source` // instead of the one from `crates.io` if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) { - add_dep(crate_graph, *from, name.clone(), to, DependencyKind::Normal); + add_dep(crate_graph, *from, name.clone(), to); } } } @@ -1282,10 +1251,8 @@ fn add_target_crate_root( cfg_options: CfgOptions, file_id: FileId, cargo_name: &str, - is_proc_macro: bool, - target_layout: TargetLayoutLoadResult, - rustc_crate: bool, - toolchain: Option, + kind: TargetKind, + origin: CrateOrigin, ) -> CrateId { let edition = pkg.edition; let potential_cfg_options = if pkg.features.is_empty() { @@ -1332,18 +1299,10 @@ fn add_target_crate_root( cfg_options, potential_cfg_options, env, - is_proc_macro, - if rustc_crate { - CrateOrigin::Rustc { name: pkg.name.clone() } - } else if pkg.is_member { - CrateOrigin::Local { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) } - } else { - CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() } - }, - target_layout, - toolchain, + matches!(kind, TargetKind::Lib { is_proc_macro: true }), + origin, ); - if is_proc_macro { + if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { Some(it) => it.cloned().map(|path| Ok((Some(cargo_name.to_owned()), path))), None => Some(Err("crate has not yet been built".to_owned())), @@ -1365,14 +1324,7 @@ impl SysrootPublicDeps { /// Makes `from` depend on the public sysroot crates. 
fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) { for (name, krate, prelude) in &self.deps { - add_dep_with_prelude( - crate_graph, - from, - name.clone(), - *krate, - *prelude, - DependencyKind::Normal, - ); + add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude); } } } @@ -1381,9 +1333,7 @@ fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, rustc_cfg: Vec, - target_layout: TargetLayoutLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, - toolchain: Option<&Version>, ) -> (SysrootPublicDeps, Option) { let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered(); match sysroot.mode() { @@ -1396,8 +1346,6 @@ fn sysroot_to_crate_graph( rustc_cfg, &CfgOverrides::default(), &WorkspaceBuildScripts::default(), - target_layout, - toolchain, ); let mut pub_deps = vec![]; @@ -1440,17 +1388,16 @@ fn sysroot_to_crate_graph( // Remove all crates except the ones we are interested in to keep the sysroot graph small. let removed_mapping = cg.remove_crates_except(&marker_set); + let mapping = crate_graph.extend(cg, &mut pm, |(_, a), (_, b)| a == b); - crate_graph.extend(cg, &mut pm, |mapping| { - // Map the id through the removal mapping first, then through the crate graph extension mapping. - pub_deps.iter_mut().for_each(|(_, cid, _)| { - *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()] - }); - if let Some(libproc_macro) = &mut libproc_macro { - *libproc_macro = mapping - [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()]; - } + // Map the id through the removal mapping first, then through the crate graph extension mapping. + pub_deps.iter_mut().for_each(|(_, cid, _)| { + *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()] }); + if let Some(libproc_macro) = &mut libproc_macro { + *libproc_macro = mapping + [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()]; + } (SysrootPublicDeps { deps: pub_deps }, libproc_macro) } @@ -1474,8 +1421,6 @@ fn sysroot_to_crate_graph( env, false, CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)), - target_layout.clone(), - toolchain.cloned(), ); Some((krate, crate_id)) }) @@ -1487,7 +1432,7 @@ fn sysroot_to_crate_graph( if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) { - add_dep(crate_graph, from, name, to, DependencyKind::Normal); + add_dep(crate_graph, from, name, to); } } } @@ -1508,14 +1453,8 @@ fn sysroot_to_crate_graph( } } -fn add_dep( - graph: &mut CrateGraph, - from: CrateId, - name: CrateName, - to: CrateId, - kind: DependencyKind, -) { - add_dep_inner(graph, from, Dependency::new(name, to, kind)) +fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) { + add_dep_inner(graph, from, Dependency::new(name, to)) } fn add_dep_with_prelude( @@ -1524,20 +1463,12 @@ fn add_dep_with_prelude( name: CrateName, to: CrateId, prelude: bool, - kind: DependencyKind, ) { - add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude, kind)) + add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude)) } fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId, prelude: bool) { - add_dep_with_prelude( - crate_graph, - from, - CrateName::new("proc_macro").unwrap(), - to, - prelude, - DependencyKind::Normal, - ); + add_dep_with_prelude(crate_graph, from, CrateName::new("proc_macro").unwrap(), to, prelude); } fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: 
Dependency) { @@ -1588,3 +1519,29 @@ fn create_cfg_options(rustc_cfg: Vec) -> CfgOptions { cfg_options.insert_atom("debug_assertions".into()); cfg_options } + +fn cargo_config_env( + cargo_toml: &ManifestPath, + extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, +) -> FxHashMap { + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); + cargo_config.envs(extra_env); + cargo_config + .current_dir(cargo_toml.parent()) + .args(["-Z", "unstable-options", "config", "get", "env"]) + .env("RUSTC_BOOTSTRAP", "1"); + // if successful we receive `env.key.value = "value" per entry + tracing::debug!("Discovering cargo config env by {:?}", cargo_config); + utf8_stdout(cargo_config).map(parse_output_cargo_config_env).unwrap_or_default() +} + +fn parse_output_cargo_config_env(stdout: String) -> FxHashMap { + stdout + .lines() + .filter_map(|l| l.strip_prefix("env.")) + .filter_map(|l| l.split_once(".value = ")) + .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned())) + .collect() +} diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index d8d9e559e5c..0ad19ca9f75 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -48,7 +48,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -59,10 +58,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -113,7 +108,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -121,7 +115,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -132,10 +125,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -186,7 +175,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -194,7 +182,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -205,10 +192,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -259,7 +242,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -267,7 +249,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -278,10 +259,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -347,9 +324,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index d8d9e559e5c..0ad19ca9f75 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -48,7 +48,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -59,10 +58,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - 
"target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -113,7 +108,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -121,7 +115,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -132,10 +125,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -186,7 +175,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -194,7 +182,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -205,10 +192,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -259,7 +242,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -267,7 +249,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -278,10 +259,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -347,9 +324,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index e0ba5ed498f..e2334dca875 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -47,7 +47,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -58,10 +57,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -111,7 +106,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -119,7 +113,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -130,10 +123,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -183,7 +172,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -191,7 +179,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -202,10 +189,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -255,7 +238,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -263,7 +245,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -274,10 +255,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -343,9 +320,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index 0df99534c5b..ccaba963ded 100644 --- 
a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -28,7 +28,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -36,10 +35,6 @@ Alloc, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -69,10 +64,6 @@ Core, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -102,10 +93,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -135,10 +122,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -169,7 +152,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -177,7 +159,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -185,10 +166,6 @@ ProcMacro, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 5: CrateData { root_file_id: FileId( @@ -218,10 +195,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 6: CrateData { root_file_id: FileId( @@ -252,7 +225,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -260,7 +232,6 @@ name: CrateName( "panic_unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -268,7 +239,6 @@ name: CrateName( "panic_abort", ), - kind: Normal, prelude: true, }, Dependency { @@ -276,7 +246,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, Dependency { @@ -284,7 +253,6 @@ name: CrateName( "profiler_builtins", ), - kind: Normal, prelude: true, }, Dependency { @@ -292,7 +260,6 @@ name: CrateName( "unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -300,7 +267,6 @@ name: CrateName( "std_detect", ), - kind: Normal, prelude: true, }, Dependency { @@ -308,7 +274,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: true, }, ], @@ -316,10 +281,6 @@ Std, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 7: CrateData { root_file_id: FileId( @@ -349,10 +310,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 8: CrateData { root_file_id: FileId( @@ -382,10 +339,6 @@ Test, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 9: CrateData { root_file_id: FileId( @@ -415,10 +368,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 10: CrateData { root_file_id: FileId( @@ -449,7 +398,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, Dependency { @@ -457,7 +405,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -465,7 +412,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -473,7 +419,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: false, }, 
Dependency { @@ -481,7 +426,6 @@ name: CrateName( "proc_macro", ), - kind: Normal, prelude: false, }, ], @@ -492,9 +436,5 @@ ), }, is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 269dd3cfffe..07e04a83661 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -11,7 +11,7 @@ extern crate rustc_driver as _; mod rustc_wrapper; -use std::{env, fs, path::PathBuf, process, sync::Arc}; +use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc}; use anyhow::Context; use lsp_server::Connection; @@ -27,21 +27,15 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc; #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; -fn main() -> anyhow::Result<()> { +fn main() -> anyhow::Result<ExitCode> { if std::env::var("RA_RUSTC_WRAPPER").is_ok() { - let mut args = std::env::args_os(); - let _me = args.next().unwrap(); - let rustc = args.next().unwrap(); - let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) { - Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102), - Err(err) => { - eprintln!("{err}"); - 101 - } - }; - process::exit(code); + rustc_wrapper::main().map_err(Into::into) + } else { + actual_main() } +} +fn actual_main() -> anyhow::Result<ExitCode> { let flags = flags::RustAnalyzer::from_env_or_exit(); #[cfg(debug_assertions)] @@ -58,14 +52,14 @@ fn main() -> anyhow::Result<()> { let verbosity = flags.verbosity(); match flags.subcommand { - flags::RustAnalyzerCmd::LspServer(cmd) => { + flags::RustAnalyzerCmd::LspServer(cmd) => 'lsp_server: { if cmd.print_config_schema { println!("{:#}", Config::json_schema()); - return Ok(()); + break 'lsp_server; } if cmd.version { println!("rust-analyzer {}", rust_analyzer::version()); - return Ok(()); + break 'lsp_server; } // rust-analyzer’s “main thread” is actually @@ -90,7 +84,7 @@ fn main() -> anyhow::Result<()> { flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?, flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?, } - Ok(()) + Ok(ExitCode::SUCCESS) } fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> { diff --git a/crates/rust-analyzer/src/bin/rustc_wrapper.rs b/crates/rust-analyzer/src/bin/rustc_wrapper.rs index 38e9c7dd7e1..684b3f52afc 100644 --- a/crates/rust-analyzer/src/bin/rustc_wrapper.rs +++ b/crates/rust-analyzer/src/bin/rustc_wrapper.rs @@ -7,13 +7,17 @@ use std::{ ffi::OsString, io, - process::{Command, Stdio}, + process::{Command, ExitCode, Stdio}, }; -/// ExitCode/ExitStatus are impossible to create :(.
-pub(crate) struct ExitCode(pub(crate) Option); +pub(crate) fn main() -> io::Result { + let mut args = std::env::args_os(); + let _me = args.next().unwrap(); + let rustc = args.next().unwrap(); + run_rustc_skipping_cargo_checking(rustc, args.collect()) +} -pub(crate) fn run_rustc_skipping_cargo_checking( +fn run_rustc_skipping_cargo_checking( rustc_executable: OsString, args: Vec, ) -> io::Result { @@ -35,9 +39,10 @@ pub(crate) fn run_rustc_skipping_cargo_checking( arg.starts_with("--emit=") && arg.contains("metadata") && !arg.contains("link") }); if not_invoked_by_build_script && is_cargo_check { - return Ok(ExitCode(Some(0))); + Ok(ExitCode::from(0)) + } else { + run_rustc(rustc_executable, args) } - run_rustc(rustc_executable, args) } fn run_rustc(rustc_executable: OsString, args: Vec) -> io::Result { @@ -47,5 +52,5 @@ fn run_rustc(rustc_executable: OsString, args: Vec) -> io::Result { + TargetKind::Lib { is_proc_macro: _ } => { buf.push("--lib".to_owned()); } TargetKind::Other | TargetKind::BuildScript => (), diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 2741b452225..ce7e3b3cd6a 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -32,7 +32,7 @@ use oorandom::Rand32; use profile::{Bytes, StopWatch}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use rayon::prelude::*; -use rustc_hash::FxHashSet; +use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{AstNode, SyntaxNode}; use vfs::{AbsPathBuf, FileId, Vfs, VfsPath}; @@ -91,7 +91,7 @@ impl flags::AnalysisStats { }; let (host, vfs, _proc_macro) = - load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?; + load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?; let db = host.raw_database(); eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed()); eprint!(" (metadata {metadata_time}"); @@ -232,7 +232,11 @@ impl flags::AnalysisStats { } if self.run_all_ide_things { - self.run_ide_things(host.analysis(), file_ids); + self.run_ide_things(host.analysis(), file_ids.clone()); + } + + if self.run_term_search { + self.run_term_search(&workspace, db, &vfs, file_ids, verbosity); } let total_span = analysis_sw.elapsed(); @@ -321,6 +325,212 @@ impl flags::AnalysisStats { report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms"); } + fn run_term_search( + &self, + ws: &ProjectWorkspace, + db: &RootDatabase, + vfs: &Vfs, + mut file_ids: Vec, + verbosity: Verbosity, + ) { + let cargo_config = CargoConfig { + sysroot: match self.no_sysroot { + true => None, + false => Some(RustLibSource::Discover), + }, + ..Default::default() + }; + + let mut bar = match verbosity { + Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(), + _ if self.parallel || self.output.is_some() => ProgressReport::hidden(), + _ => ProgressReport::new(file_ids.len() as u64), + }; + + file_ids.sort(); + file_ids.dedup(); + + #[derive(Debug, Default)] + struct Acc { + tail_expr_syntax_hits: u64, + tail_expr_no_term: u64, + total_tail_exprs: u64, + error_codes: FxHashMap, + syntax_errors: u32, + } + + let mut acc: Acc = Default::default(); + bar.tick(); + let mut sw = self.stop_watch(); + + for &file_id in &file_ids { + let sema = hir::Semantics::new(db); + let _ = db.parse(file_id); + + let parse = sema.parse(file_id); + let file_txt = db.file_text(file_id); + let path = vfs.file_path(file_id).as_path().unwrap().to_owned(); + + for node in 
parse.syntax().descendants() { + let expr = match syntax::ast::Expr::cast(node.clone()) { + Some(it) => it, + None => continue, + }; + let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) { + Some(it) => it, + None => continue, + }; + let target_ty = match sema.type_of_expr(&expr) { + Some(it) => it.adjusted(), + None => continue, // Failed to infer type + }; + + let expected_tail = match block.tail_expr() { + Some(it) => it, + None => continue, + }; + + if expected_tail.is_block_like() { + continue; + } + + let range = sema.original_range(expected_tail.syntax()).range; + let original_text: String = db + .file_text(file_id) + .chars() + .skip(usize::from(range.start())) + .take(usize::from(range.end()) - usize::from(range.start())) + .collect(); + + let scope = match sema.scope(expected_tail.syntax()) { + Some(it) => it, + None => continue, + }; + + let ctx = hir::term_search::TermSearchCtx { + sema: &sema, + scope: &scope, + goal: target_ty, + config: hir::term_search::TermSearchConfig { + enable_borrowcheck: true, + ..Default::default() + }, + }; + let found_terms = hir::term_search::term_search(&ctx); + + if found_terms.is_empty() { + acc.tail_expr_no_term += 1; + acc.total_tail_exprs += 1; + // println!("\n{}\n", &original_text); + continue; + }; + + fn trim(s: &str) -> String { + s.chars().filter(|c| !c.is_whitespace()).collect() + } + + let todo = syntax::ast::make::ext::expr_todo().to_string(); + let mut formatter = |_: &hir::Type| todo.clone(); + let mut syntax_hit_found = false; + for term in found_terms { + let generated = + term.gen_source_code(&scope, &mut formatter, false, true).unwrap(); + syntax_hit_found |= trim(&original_text) == trim(&generated); + + // Validate if type-checks + let mut txt = file_txt.to_string(); + + let edit = ide::TextEdit::replace(range, generated.clone()); + edit.apply(&mut txt); + + if self.validate_term_search { + std::fs::write(&path, txt).unwrap(); + + let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap(); + if let Some(err) = res.error() { + if err.contains("error: could not compile") { + if let Some(mut err_idx) = err.find("error[E") { + err_idx += 7; + let err_code = &err[err_idx..err_idx + 4]; + match err_code { + "0282" => continue, // Byproduct of testing method + "0277" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882 + _ => (), + } + bar.println(err); + bar.println(generated); + acc.error_codes + .entry(err_code.to_owned()) + .and_modify(|n| *n += 1) + .or_insert(1); + } else { + acc.syntax_errors += 1; + bar.println(format!("Syntax error: \n{}", err)); + } + } + } + } + } + + if syntax_hit_found { + acc.tail_expr_syntax_hits += 1; + } + acc.total_tail_exprs += 1; + + let msg = move || { + format!( + "processing: {:<50}", + trim(&original_text).chars().take(50).collect::() + ) + }; + if verbosity.is_spammy() { + bar.println(msg()); + } + bar.set_message(msg); + } + // Revert file back to original state + if self.validate_term_search { + std::fs::write(&path, file_txt.to_string()).unwrap(); + } + + bar.inc(1); + } + let term_search_time = sw.elapsed(); + + bar.println(format!( + "Tail Expr syntactic hits: {}/{} ({}%)", + acc.tail_expr_syntax_hits, + acc.total_tail_exprs, + percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs) + )); + bar.println(format!( + "Tail Exprs found: {}/{} ({}%)", + acc.total_tail_exprs - acc.tail_expr_no_term, + acc.total_tail_exprs, + percentage(acc.total_tail_exprs - acc.tail_expr_no_term, acc.total_tail_exprs) + )); + if 
self.validate_term_search { + bar.println(format!( + "Tail Exprs total errors: {}, syntax errors: {}, error codes:", + acc.error_codes.values().sum::<u32>() + acc.syntax_errors, + acc.syntax_errors, + )); + for (err, count) in acc.error_codes { + bar.println(format!( + " E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)" + )); + } + } + bar.println(format!( + "Term search avg time: {}ms", + term_search_time.time.as_millis() as u64 / acc.total_tail_exprs + )); + bar.println(format!("{:<20} {}", "Term search:", term_search_time)); + report_metric("term search time", term_search_time.time.as_millis() as u64, "ms"); + + bar.finish_and_clear(); + } + fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) { let mut sw = self.stop_watch(); let mut all = 0; diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 252b1e1a485..493e614dce6 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -93,6 +93,11 @@ xflags::xflags! { /// and annotations. This is useful for benchmarking the memory usage on a project that has /// been worked on for a bit in a longer running session. optional --run-all-ide-things + /// Run term search on all the tail expressions (of functions, blocks, if statements, etc.) + optional --run-term-search + /// Validate term search by running `cargo check` on every response. + /// Note that this also temporarily modifies the files on disk, use with caution! + optional --validate-term-search } /// Run unit tests of the project using mir interpreter @@ -218,6 +223,8 @@ pub struct AnalysisStats { pub skip_data_layout: bool, pub skip_const_eval: bool, pub run_all_ide_things: bool, + pub run_term_search: bool, + pub validate_term_search: bool, } #[derive(Debug)] diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index f4aec288348..2d56830c87f 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -135,12 +135,11 @@ impl flags::Scip { } if symbols_emitted.insert(id) { - let documentation = token - .hover - .as_ref() - .map(|hover| hover.markup.as_str()) - .filter(|it| !it.is_empty()) - .map(|it| vec![it.to_owned()]); + let documentation = match &token.documentation { + Some(doc) => vec![doc.as_str().to_owned()], + None => vec![], + }; + let position_encoding = scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into(); let signature_documentation = @@ -153,7 +152,7 @@ impl flags::Scip { }); let symbol_info = scip_types::SymbolInformation { symbol: symbol.clone(), - documentation: documentation.unwrap_or_default(), + documentation, relationships: Vec::new(), special_fields: Default::default(), kind: symbol_kind(token.kind).into(), @@ -599,4 +598,22 @@ pub mod example_mod { "rust-analyzer cargo main . 
MyTypeAlias#", ); } + + #[test] + fn documentation_matches_doc_comment() { + let s = "/// foo\nfn bar() {}"; + + let mut host = AnalysisHost::default(); + let change_fixture = ChangeFixture::parse(s); + host.raw_database_mut().apply_change(change_fixture.change); + + let analysis = host.analysis(); + let si = StaticIndex::compute(&analysis); + + let file = si.files.first().unwrap(); + let (_, token_id) = file.tokens.first().unwrap(); + let token = si.tokens.get(*token_id).unwrap(); + + assert_eq!(token.documentation.as_ref().map(|d| d.as_str()), Some("foo")); + } } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 7bdd9ec866a..16e1a2f5449 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -112,7 +112,7 @@ config_data! { cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null", /// Rerun proc-macros building/build-scripts running when proc-macro /// or build-script sources change and are saved. - cargo_buildScripts_rebuildOnSave: bool = "false", + cargo_buildScripts_rebuildOnSave: bool = "true", /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to /// avoid checking unnecessary things. cargo_buildScripts_useRustcWrapper: bool = "true", @@ -209,6 +209,11 @@ config_data! { /// by changing `#rust-analyzer.check.invocationStrategy#` and /// `#rust-analyzer.check.invocationLocation#`. /// + /// If `$saved_file` is part of the command, rust-analyzer will pass + /// the absolute path of the saved file to the provided command. This is + /// intended to be used with non-Cargo build systems. + /// Note that `$saved_file` is experimental and may be removed in the future. + /// /// An example command would be: /// /// ```bash @@ -286,6 +291,8 @@ config_data! { "scope": "expr" } }"#, + /// Whether to enable term search based snippets like `Some(foo.bar().baz())`. + completion_termSearch_enable: bool = "false", /// List of rust-analyzer diagnostics to disable. diagnostics_disabled: FxHashSet<String> = "[]", @@ -504,9 +511,6 @@ config_data! { /// Exclude tests from find-all-references. references_excludeTests: bool = "false", - /// Allow renaming of items not belonging to the loaded workspaces. - rename_allowExternalItems: bool = "false", - /// Command to be executed instead of 'cargo' for runnables. runnables_command: Option<String> = "null", @@ -1202,7 +1206,7 @@ impl Config { Some(AbsPathBuf::try_from(path).unwrap_or_else(|path| self.root_path.join(path))) } - pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> { + pub fn ignored_proc_macros(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> { &self.data.procMacro_ignored } @@ -1535,6 +1539,7 @@ impl Config { && completion_item_edit_resolve(&self.caps), enable_self_on_the_fly: self.data.completion_autoself_enable, enable_private_editable: self.data.completion_privateEditable_enable, + enable_term_search: self.data.completion_termSearch_enable, full_function_signatures: self.data.completion_fullFunctionSignatures_enable, callable: match self.data.completion_callable_snippets { CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments), @@ -1766,10 +1771,6 @@ impl Config { self.data.typing_autoClosingAngleBrackets_enable } - pub fn rename(&self) -> bool { - self.data.rename_allowExternalItems - } - // FIXME: VSCode seems to work wrong sometimes, see https://github.com/microsoft/vscode/issues/193124 // hence, distinguish it for now.
pub fn is_visual_studio_code(&self) -> bool { diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index da4422a60a8..293807a383b 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -9,7 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; use hir::Change; use ide::{Analysis, AnalysisHost, Cancellable, FileId}; -use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase}; +use ide_db::base_db::{CrateId, ProcMacroPaths}; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; use nohash_hasher::IntMap; @@ -74,8 +74,8 @@ pub(crate) struct GlobalState { pub(crate) last_reported_status: Option, // proc macros - pub(crate) proc_macro_changed: bool, pub(crate) proc_macro_clients: Arc<[anyhow::Result]>, + pub(crate) build_deps_changed: bool, // Flycheck pub(crate) flycheck: Arc<[FlycheckHandle]>, @@ -203,9 +203,10 @@ impl GlobalState { source_root_config: SourceRootConfig::default(), config_errors: Default::default(), - proc_macro_changed: false, proc_macro_clients: Arc::from_iter([]), + build_deps_changed: false, + flycheck: Arc::from_iter([]), flycheck_sender, flycheck_receiver, @@ -300,12 +301,19 @@ impl GlobalState { if let Some(path) = vfs_path.as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.kind()) { - workspace_structure_change = Some((path.clone(), false)); + workspace_structure_change = Some(( + path.clone(), + false, + AsRef::::as_ref(&path).ends_with("build.rs"), + )); } if file.is_created_or_deleted() { has_structure_changes = true; - workspace_structure_change = - Some((path, self.crate_graph_file_dependencies.contains(vfs_path))); + workspace_structure_change = Some(( + path, + self.crate_graph_file_dependencies.contains(vfs_path), + false, + )); } else if path.extension() == Some("rs".as_ref()) { modified_rust_files.push(file.file_id); } @@ -346,23 +354,28 @@ impl GlobalState { }; self.analysis_host.apply_change(change); + { - let raw_database = self.analysis_host.raw_database(); + if !matches!(&workspace_structure_change, Some((.., true))) { + _ = self + .deferred_task_queue + .sender + .send(crate::main_loop::QueuedTask::CheckProcMacroSources(modified_rust_files)); + } // FIXME: ideally we should only trigger a workspace fetch for non-library changes // but something's going wrong with the source root business when we add a new local // crate see https://github.com/rust-lang/rust-analyzer/issues/13029 - if let Some((path, force_crate_graph_reload)) = workspace_structure_change { + if let Some((path, force_crate_graph_reload, build_scripts_touched)) = + workspace_structure_change + { self.fetch_workspaces_queue.request_op( format!("workspace vfs file change: {path}"), force_crate_graph_reload, ); + if build_scripts_touched { + self.fetch_build_data_queue.request_op(format!("build.rs changed: {path}"), ()); + } } - self.proc_macro_changed = modified_rust_files.into_iter().any(|file_id| { - let crates = raw_database.relevant_crates(file_id); - let crate_graph = raw_database.crate_graph(); - - crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) - }); } true diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs index d3c2073f09d..b13c709dbfe 100644 --- a/crates/rust-analyzer/src/handlers/notification.rs +++ b/crates/rust-analyzer/src/handlers/notification.rs @@ -145,11 +145,11 @@ pub(crate) fn 
handle_did_save_text_document( state: &mut GlobalState, params: DidSaveTextDocumentParams, ) -> anyhow::Result<()> { - if state.config.script_rebuild_on_save() && state.proc_macro_changed { - // reset the flag - state.proc_macro_changed = false; - // rebuild the proc macros - state.fetch_build_data_queue.request_op("ScriptRebuildOnSave".to_owned(), ()); + if state.config.script_rebuild_on_save() && state.build_deps_changed { + state.build_deps_changed = false; + state + .fetch_build_data_queue + .request_op("build_deps_changed - save notification".to_owned(), ()); } if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { @@ -158,7 +158,7 @@ pub(crate) fn handle_did_save_text_document( if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) { state .fetch_workspaces_queue - .request_op(format!("DidSaveTextDocument {abs_path}"), false); + .request_op(format!("workspace vfs file change saved {abs_path}"), false); } } @@ -168,7 +168,7 @@ pub(crate) fn handle_did_save_text_document( } else if state.config.check_on_save() { // No specific flycheck was triggered, so let's trigger all of them. for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(); + flycheck.restart_workspace(None); } } Ok(()) @@ -314,6 +314,8 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { Some((idx, package)) }); + let saved_file = vfs_path.as_path().map(|p| p.to_owned()); + // Find and trigger corresponding flychecks for flycheck in world.flycheck.iter() { for (id, package) in workspace_ids.clone() { @@ -321,7 +323,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { updated = true; match package.filter(|_| !world.config.flycheck_workspace()) { Some(package) => flycheck.restart_for_package(package), - None => flycheck.restart_workspace(), + None => flycheck.restart_workspace(saved_file.clone()), } continue; } @@ -330,7 +332,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { // No specific flycheck was triggered, so let's trigger all of them. if !updated { for flycheck in world.flycheck.iter() { - flycheck.restart_workspace(); + flycheck.restart_workspace(saved_file.clone()); } } Ok(()) @@ -372,7 +374,7 @@ pub(crate) fn handle_run_flycheck( } // No specific flycheck was triggered, so let's trigger all of them. 
for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(); + flycheck.restart_workspace(None); } Ok(()) } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 2a3633a48e9..eb9d4bf0f02 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -52,7 +52,7 @@ use crate::{ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { state.proc_macro_clients = Arc::from_iter([]); - state.proc_macro_changed = false; + state.build_deps_changed = false; state.fetch_workspaces_queue.request_op("reload workspace request".to_owned(), false); Ok(()) @@ -60,7 +60,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow: pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { state.proc_macro_clients = Arc::from_iter([]); - state.proc_macro_changed = false; + state.build_deps_changed = false; state.fetch_build_data_queue.request_op("rebuild proc macros request".to_owned(), ()); Ok(()) @@ -1017,10 +1017,8 @@ pub(crate) fn handle_rename( let _p = tracing::span!(tracing::Level::INFO, "handle_rename").entered(); let position = from_proto::file_position(&snap, params.text_document_position)?; - let mut change = snap - .analysis - .rename(position, ¶ms.new_name, snap.config.rename())? - .map_err(to_proto::rename_error)?; + let mut change = + snap.analysis.rename(position, ¶ms.new_name)?.map_err(to_proto::rename_error)?; // this is kind of a hack to prevent double edits from happening when moving files // When a module gets renamed by renaming the mod declaration this causes the file to move @@ -1937,6 +1935,7 @@ fn run_rustfmt( let mut command = match snap.config.rustfmt() { RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => { + // FIXME: Set RUSTUP_TOOLCHAIN let mut cmd = process::Command::new(toolchain::rustfmt()); cmd.envs(snap.config.extra_env()); cmd.args(extra_args); diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index acc02d6447c..f0eee77aff5 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -132,6 +132,7 @@ fn integrated_completion_benchmark() { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: true, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), @@ -175,6 +176,7 @@ fn integrated_completion_benchmark() { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: true, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), @@ -216,6 +218,7 @@ fn integrated_completion_benchmark() { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: true, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index b1809f58ae7..473ca991ad9 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -47,7 +47,9 @@ mod integrated_benchmarks; use serde::de::DeserializeOwned; -pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version}; +pub use 
crate::{ + caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version, +}; pub fn from_json( what: &'static str, diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 64f19f0b32d..727007bba08 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -123,6 +123,7 @@ pub(crate) fn completion_item_kind( CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD, CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET, CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE, + CompletionItemKind::Expression => lsp_types::CompletionItemKind::SNIPPET, CompletionItemKind::SymbolKind(symbol) => match symbol { SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION, SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT, @@ -929,6 +930,16 @@ fn merge_text_and_snippet_edits( let mut edits: Vec = vec![]; let mut snippets = snippet_edit.into_edit_ranges().into_iter().peekable(); let text_edits = edit.into_iter(); + // offset to go from the final source location to the original source location + let mut source_text_offset = 0i32; + + let offset_range = |range: TextRange, offset: i32| -> TextRange { + // map the snippet range from the target location into the original source location + let start = u32::from(range.start()).checked_add_signed(offset).unwrap_or(0); + let end = u32::from(range.end()).checked_add_signed(offset).unwrap_or(0); + + TextRange::new(start.into(), end.into()) + }; for current_indel in text_edits { let new_range = { @@ -937,10 +948,17 @@ fn merge_text_and_snippet_edits( TextRange::at(current_indel.delete.start(), insert_len) }; + // figure out how much this Indel will shift future ranges from the initial source + let offset_adjustment = + u32::from(current_indel.delete.len()) as i32 - u32::from(new_range.len()) as i32; + // insert any snippets before the text edit - for (snippet_index, snippet_range) in - snippets.take_while_ref(|(_, range)| range.end() < new_range.start()) - { + for (snippet_index, snippet_range) in snippets.peeking_take_while(|(_, range)| { + offset_range(*range, source_text_offset).end() < new_range.start() + }) { + // adjust the snippet range into the corresponding initial source location + let snippet_range = offset_range(snippet_range, source_text_offset); + let snippet_range = if !stdx::always!( snippet_range.is_empty(), "placeholder range {:?} is before current text edit range {:?}", @@ -953,22 +971,23 @@ fn merge_text_and_snippet_edits( snippet_range }; - let range = range(line_index, snippet_range); - let new_text = format!("${snippet_index}"); - - edits.push(SnippetTextEdit { - range, - new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - }) + edits.push(snippet_text_edit( + line_index, + true, + Indel { insert: format!("${snippet_index}"), delete: snippet_range }, + )) } - if snippets.peek().is_some_and(|(_, range)| new_range.intersect(*range).is_some()) { + if snippets.peek().is_some_and(|(_, range)| { + new_range.intersect(offset_range(*range, source_text_offset)).is_some() + }) { // at least one snippet edit intersects this text edit, // so gather all of the edits that intersect this text edit let mut all_snippets = snippets - .take_while_ref(|(_, range)| new_range.intersect(*range).is_some()) + .peeking_take_while(|(_, range)| { + new_range.intersect(offset_range(*range, source_text_offset)).is_some() + }) + 
.map(|(tabstop, range)| (tabstop, offset_range(range, source_text_offset))) .collect_vec(); // ensure all of the ranges are wholly contained inside of the new range @@ -979,40 +998,59 @@ fn merge_text_and_snippet_edits( ) }); - let mut text_edit = text_edit(line_index, current_indel); + let mut new_text = current_indel.insert; - // escape out snippet text - stdx::replace(&mut text_edit.new_text, '\\', r"\\"); - stdx::replace(&mut text_edit.new_text, '$', r"\$"); + // find which snippet bits need to be escaped + let escape_places = new_text + .rmatch_indices(['\\', '$', '{', '}']) + .map(|(insert, _)| insert) + .collect_vec(); + let mut escape_places = escape_places.into_iter().peekable(); + let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { + for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { + new_text.insert(before, '\\'); + } + }; - // ...and apply! + // insert snippets, and escaping any needed bits along the way for (index, range) in all_snippets.iter().rev() { - let start = (range.start() - new_range.start()).into(); - let end = (range.end() - new_range.start()).into(); + let text_range = range - new_range.start(); + let (start, end) = (text_range.start().into(), text_range.end().into()); if range.is_empty() { - text_edit.new_text.insert_str(start, &format!("${index}")); + escape_prior_bits(&mut new_text, start); + new_text.insert_str(start, &format!("${index}")); } else { - text_edit.new_text.insert(end, '}'); - text_edit.new_text.insert_str(start, &format!("${{{index}:")); + escape_prior_bits(&mut new_text, end); + new_text.insert(end, '}'); + escape_prior_bits(&mut new_text, start); + new_text.insert_str(start, &format!("${{{index}:")); } } - edits.push(SnippetTextEdit { - range: text_edit.range, - new_text: text_edit.new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - }) + // escape any remaining bits + escape_prior_bits(&mut new_text, 0); + + edits.push(snippet_text_edit( + line_index, + true, + Indel { insert: new_text, delete: current_indel.delete }, + )) } else { // snippet edit was beyond the current one // since it wasn't consumed, it's available for the next pass edits.push(snippet_text_edit(line_index, false, current_indel)); } + + // update the final source -> initial source mapping offset + source_text_offset += offset_adjustment; } // insert any remaining tabstops edits.extend(snippets.map(|(snippet_index, snippet_range)| { + // adjust the snippet range into the corresponding initial source location + let snippet_range = offset_range(snippet_range, source_text_offset); + let snippet_range = if !stdx::always!( snippet_range.is_empty(), "found placeholder snippet {:?} without a text edit", @@ -1023,15 +1061,11 @@ fn merge_text_and_snippet_edits( snippet_range }; - let range = range(line_index, snippet_range); - let new_text = format!("${snippet_index}"); - - SnippetTextEdit { - range, - new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - } + snippet_text_edit( + line_index, + true, + Indel { insert: format!("${snippet_index}"), delete: snippet_range }, + ) })); edits @@ -1658,15 +1692,44 @@ fn bar(_: usize) {} assert!(!docs.contains("use crate::bar")); } + #[track_caller] fn check_rendered_snippets(edit: TextEdit, snippets: SnippetEdit, expect: Expect) { - let text = r#"/* place to put all ranges in */"#; + check_rendered_snippets_in_source( + r"/* place to put all ranges in */", + edit, + snippets, + expect, + ); + } + + 
#[track_caller] + fn check_rendered_snippets_in_source( + ra_fixture: &str, + edit: TextEdit, + snippets: SnippetEdit, + expect: Expect, + ) { + let source = stdx::trim_indent(ra_fixture); + let endings = if source.contains('\r') { LineEndings::Dos } else { LineEndings::Unix }; let line_index = LineIndex { - index: Arc::new(ide::LineIndex::new(text)), - endings: LineEndings::Unix, + index: Arc::new(ide::LineIndex::new(&source)), + endings, encoding: PositionEncoding::Utf8, }; let res = merge_text_and_snippet_edits(&line_index, edit, snippets); + + // Ensure that none of the ranges overlap + { + let mut sorted = res.clone(); + sorted.sort_by_key(|edit| (edit.range.start, edit.range.end)); + let disjoint_ranges = sorted + .iter() + .zip(sorted.iter().skip(1)) + .all(|(l, r)| l.range.end <= r.range.start || l == r); + assert!(disjoint_ranges, "ranges overlap for {res:#?}"); + } + expect.assert_debug_eq(&res); } @@ -1811,7 +1874,8 @@ fn bar(_: usize) {} let mut edit = TextEdit::builder(); edit.insert(0.into(), "abc".to_owned()); let edit = edit.finish(); - let snippets = SnippetEdit::new(vec![Snippet::Tabstop(7.into())]); + // Note: tabstops are positioned in the source where all text edits have been applied + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]); check_rendered_snippets( edit, @@ -1928,8 +1992,9 @@ fn bar(_: usize) {} edit.insert(0.into(), "abc".to_owned()); edit.insert(7.into(), "abc".to_owned()); let edit = edit.finish(); + // Note: tabstops are positioned in the source where all text edits have been applied let snippets = - SnippetEdit::new(vec![Snippet::Tabstop(4.into()), Snippet::Tabstop(4.into())]); + SnippetEdit::new(vec![Snippet::Tabstop(7.into()), Snippet::Tabstop(7.into())]); check_rendered_snippets( edit, @@ -2085,13 +2150,502 @@ fn bar(_: usize) {} fn snippet_rendering_escape_snippet_bits() { // only needed for snippet formats let mut edit = TextEdit::builder(); - edit.insert(0.into(), r"abc\def$".to_owned()); - edit.insert(8.into(), r"ghi\jkl$".to_owned()); + edit.insert(0.into(), r"$ab{}$c\def".to_owned()); + edit.insert(8.into(), r"ghi\jk<-check_insert_here$".to_owned()); + edit.insert(10.into(), r"a\\b\\c{}$".to_owned()); let edit = edit.finish(); - let snippets = - SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(0.into(), 3.into()))]); + let snippets = SnippetEdit::new(vec![ + Snippet::Placeholder(TextRange::new(1.into(), 9.into())), + Snippet::Tabstop(25.into()), + ]); check_rendered_snippets( + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 0, + }, + }, + new_text: "\\$${1:ab\\{\\}\\$c\\\\d}ef", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 8, + }, + end: Position { + line: 0, + character: 8, + }, + }, + new_text: "ghi\\\\jk$0<-check_insert_here\\$", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 10, + }, + end: Position { + line: 0, + character: 10, + }, + }, + new_text: "a\\\\b\\\\c{}$", + insert_text_format: None, + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( 
+ TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(39.into(), 40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(43.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(51.into(), 59.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(39.into(), 
40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(43.into(), 51.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_between_text_edits() { + // inserting between edits, tabstop should be at (1, 14) + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(58.into(), 90.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + // add an extra space between `ProcMacro` and `{` to insert the tabstop at + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 14, + }, + }, + new_text: "$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 15, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: None, + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_after_text_edits() { + // inserting after edits, tabstop should be before the closing curly of the fn + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(109.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + 
character: 5, + }, + }, + new_text: "disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 4, + character: 0, + }, + end: Position { + line: 4, + character: 0, + }, + }, + new_text: "$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_handle_dos_line_endings() { + // unix -> dos conversion should be handled after placing snippets + let mut edit = TextEdit::builder(); + edit.insert(6.into(), "\n\n->".to_owned()); + + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]); + + check_rendered_snippets_in_source( + "yeah\r\n<-tabstop here", edit, snippets, expect![[r#" @@ -2099,38 +2653,23 @@ fn bar(_: usize) {} SnippetTextEdit { range: Range { start: Position { - line: 0, + line: 1, character: 0, }, end: Position { - line: 0, + line: 1, character: 0, }, }, - new_text: "${0:abc}\\\\def\\$", + new_text: "\r\n\r\n->$0", insert_text_format: Some( Snippet, ), annotation_id: None, }, - SnippetTextEdit { - range: Range { - start: Position { - line: 0, - character: 8, - }, - end: Position { - line: 0, - character: 8, - }, - }, - new_text: "ghi\\jkl$", - insert_text_format: None, - annotation_id: None, - }, ] "#]], - ); + ) } // `Url` is not able to parse windows paths on unix machines. diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 88660db7e93..72f6d0fde5f 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -8,12 +8,10 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; -use flycheck::FlycheckHandle; -use ide_db::base_db::{SourceDatabaseExt, VfsPath}; +use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; use lsp_server::{Connection, Notification, Request}; use lsp_types::notification::Notification as _; use stdx::thread::ThreadIntent; -use triomphe::Arc; use vfs::FileId; use crate::{ @@ -77,6 +75,7 @@ impl fmt::Display for Event { #[derive(Debug)] pub(crate) enum QueuedTask { CheckIfIndexed(lsp_types::Url), + CheckProcMacroSources(Vec), } #[derive(Debug)] @@ -89,6 +88,7 @@ pub(crate) enum Task { FetchWorkspace(ProjectWorkspaceProgress), FetchBuildData(BuildDataProgress), LoadProcMacros(ProcMacroProgress), + BuildDepsHaveChanged, } #[derive(Debug)] @@ -337,7 +337,7 @@ impl GlobalState { if became_quiescent { if self.config.check_on_save() { // Project has loaded properly, kick off initial flycheck - self.flycheck.iter().for_each(FlycheckHandle::restart_workspace); + self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None)); } if self.config.prefill_caches() { self.prime_caches_queue.request_op("became quiescent".to_owned(), ()); @@ -358,9 +358,7 @@ impl GlobalState { } // Refresh inlay hints if the client supports it. 
- if (self.send_hint_refresh_query || self.proc_macro_changed) - && self.config.inlay_hints_refresh() - { + if self.send_hint_refresh_query && self.config.inlay_hints_refresh() { self.send_request::((), |_, _| ()); self.send_hint_refresh_query = false; } @@ -555,16 +553,7 @@ impl GlobalState { if let Err(e) = self.fetch_workspace_error() { tracing::error!("FetchWorkspaceError:\n{e}"); } - - let old = Arc::clone(&self.workspaces); self.switch_workspaces("fetched workspace".to_owned()); - let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces); - - if self.config.run_build_scripts() && workspaces_updated { - self.fetch_build_data_queue - .request_op("workspace updated".to_owned(), ()); - } - (Progress::End, None) } }; @@ -608,6 +597,7 @@ impl GlobalState { self.report_progress("Loading", state, msg, None, None); } } + Task::BuildDepsHaveChanged => self.build_deps_changed = true, } } @@ -686,6 +676,25 @@ impl GlobalState { } }); } + QueuedTask::CheckProcMacroSources(modified_rust_files) => { + let crate_graph = self.analysis_host.raw_database().crate_graph(); + let snap = self.snapshot(); + self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, { + move |sender| { + if modified_rust_files.into_iter().any(|file_id| { + // FIXME: Check whether these files could be build script related + match snap.analysis.crates_for(file_id) { + Ok(crates) => { + crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) + } + _ => false, + } + }) { + sender.send(Task::BuildDepsHaveChanged).unwrap(); + } + } + }); + } } } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 7bd2877b00c..5895459d1fc 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -17,8 +17,9 @@ use std::{iter, mem}; use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::{db::DefDatabase, Change, ProcMacros}; +use ide::CrateId; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, ProcMacroPaths}, + base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version}, FxHashMap, }; use itertools::Itertools; @@ -28,7 +29,7 @@ use project_model::{ProjectWorkspace, WorkspaceBuildScripts}; use rustc_hash::FxHashSet; use stdx::{format_to, thread::ThreadIntent}; use triomphe::Arc; -use vfs::{AbsPath, ChangeKind}; +use vfs::{AbsPath, AbsPathBuf, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, @@ -83,7 +84,7 @@ impl GlobalState { } if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects() { - self.fetch_workspaces_queue.request_op("linked projects changed".to_owned(), false) + self.fetch_workspaces_queue.request_op("discovered projects changed".to_owned(), false) } else if self.config.flycheck() != old_config.flycheck() { self.reload_flycheck(); } @@ -106,9 +107,11 @@ impl GlobalState { }; let mut message = String::new(); - if self.proc_macro_changed { + if self.build_deps_changed { status.health = lsp_ext::Health::Warning; - message.push_str("Proc-macros have changed and need to be rebuilt.\n\n"); + message.push_str( + "Proc-macros and/or build scripts have changed and need to be rebuilt.\n\n", + ); } if self.fetch_build_data_error().is_err() { status.health = lsp_ext::Health::Warning; @@ -234,7 +237,6 @@ impl GlobalState { it.clone(), cargo_config.target.as_deref(), &cargo_config.extra_env, - None, )) } }) @@ -300,13 +302,13 @@ impl GlobalState { pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec) { tracing::info!(%cause, "will load proc macros"); - let 
dummy_replacements = self.config.dummy_replacements().clone(); + let ignored_proc_macros = self.config.ignored_proc_macros().clone(); let proc_macro_clients = self.proc_macro_clients.clone(); self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| { sender.send(Task::LoadProcMacros(ProcMacroProgress::Begin)).unwrap(); - let dummy_replacements = &dummy_replacements; + let ignored_proc_macros = &ignored_proc_macros; let progress = { let sender = sender.clone(); &move |msg| { @@ -334,7 +336,12 @@ impl GlobalState { crate_name .as_deref() .and_then(|crate_name| { - dummy_replacements.get(crate_name).map(|v| &**v) + ignored_proc_macros.iter().find_map( + |(name, macros)| { + eq_ignore_underscore(name, crate_name) + .then_some(&**macros) + }, + ) }) .unwrap_or_default(), ) @@ -404,6 +411,10 @@ impl GlobalState { if *force_reload_crate_graph { self.recreate_crate_graph(cause); } + if self.build_deps_changed && self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("build_deps_changed".to_owned(), ()); + } // Current build scripts do not match the version of the active // workspace, so there's nothing for us to update. return; @@ -415,6 +426,11 @@ impl GlobalState { // we don't care about build-script results, they are stale. // FIXME: can we abort the build scripts here? self.workspaces = Arc::new(workspaces); + + if self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("workspace updated".to_owned(), ()); + } } if let FilesWatcher::Client = self.config.files().watcher { @@ -464,8 +480,23 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; + let env = + match ws { + ProjectWorkspace::Cargo { cargo_config_extra_env, sysroot, .. 
} => { + cargo_config_extra_env + .iter() + .chain(self.config.extra_env()) + .map(|(a, b)| (a.clone(), b.clone())) + .chain(sysroot.as_ref().map(|it| { + ("RUSTUP_TOOLCHAIN".to_owned(), it.root().to_string()) + })) + .collect() + } + _ => Default::default(), + }; tracing::info!("Using proc-macro server at {path}"); - ProcMacroServer::spawn(path.clone()).map_err(|err| { + + ProcMacroServer::spawn(path.clone(), &env).map_err(|err| { tracing::error!( "Failed to run proc-macro server from path {path}, error: {err:?}", ); @@ -494,15 +525,15 @@ impl GlobalState { } fn recreate_crate_graph(&mut self, cause: String) { - // Create crate graph from all the workspaces - let (crate_graph, proc_macro_paths, crate_graph_file_dependencies) = { + { + // Create crate graph from all the workspaces let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; // crate graph construction relies on these paths, record them so when one of them gets // deleted or created we trigger a reconstruction of the crate graph let mut crate_graph_file_dependencies = FxHashSet::default(); - let mut load = |path: &AbsPath| { + let load = |path: &AbsPath| { let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); crate_graph_file_dependencies.insert(vfs_path.clone()); @@ -517,32 +548,26 @@ impl GlobalState { } }; - let mut crate_graph = CrateGraph::default(); - let mut proc_macros = Vec::default(); - for ws in &**self.workspaces { - let (other, mut crate_proc_macros) = - ws.to_crate_graph(&mut load, self.config.extra_env()); - crate_graph.extend(other, &mut crate_proc_macros, |_| {}); - proc_macros.push(crate_proc_macros); + let (crate_graph, proc_macro_paths, layouts, toolchains) = + ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load); + + let mut change = Change::new(); + if self.config.expand_proc_macros() { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .collect(), + ); + self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); } - (crate_graph, proc_macros, crate_graph_file_dependencies) - }; - - let mut change = Change::new(); - if self.config.expand_proc_macros() { - change.set_proc_macros( - crate_graph - .iter() - .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) - .collect(), - ); - self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + change.set_crate_graph(crate_graph); + change.set_target_data_layouts(layouts); + change.set_toolchains(toolchains); + self.analysis_host.apply_change(change); + self.crate_graph_file_dependencies = crate_graph_file_dependencies; } - change.set_crate_graph(crate_graph); - self.analysis_host.apply_change(change); - self.crate_graph_file_dependencies = crate_graph_file_dependencies; self.process_changes(); - self.reload_flycheck(); } @@ -605,6 +630,7 @@ impl GlobalState { 0, Box::new(move |msg| sender.send(msg).unwrap()), config, + None, self.config.root_path().clone(), )], flycheck::InvocationStrategy::PerWorkspace => { @@ -612,23 +638,32 @@ impl GlobalState { .iter() .enumerate() .filter_map(|(id, w)| match w { - ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())), - ProjectWorkspace::Json { project, .. } => { + ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some(( + id, + cargo.workspace_root(), + sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), + )), + ProjectWorkspace::Json { project, sysroot, .. 
} => { // Enable flychecks for json projects if a custom flycheck command was supplied // in the workspace configuration. match config { - FlycheckConfig::CustomCommand { .. } => Some((id, project.path())), + FlycheckConfig::CustomCommand { .. } => Some(( + id, + project.path(), + sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), + )), _ => None, } } ProjectWorkspace::DetachedFiles { .. } => None, }) - .map(|(id, root)| { + .map(|(id, root, sysroot_root)| { let sender = sender.clone(); FlycheckHandle::spawn( id, Box::new(move |msg| sender.send(msg).unwrap()), config.clone(), + sysroot_root, root.to_path_buf(), ) }) @@ -639,6 +674,69 @@ impl GlobalState { } } +// FIXME: Move this into load-cargo? +pub fn ws_to_crate_graph( + workspaces: &[ProjectWorkspace], + extra_env: &FxHashMap, + mut load: impl FnMut(&AbsPath) -> Option, +) -> ( + CrateGraph, + Vec, AbsPathBuf), String>>>, + Vec, Arc>>, + Vec>, +) { + let mut crate_graph = CrateGraph::default(); + let mut proc_macro_paths = Vec::default(); + let mut layouts = Vec::default(); + let mut toolchains = Vec::default(); + let e = Err(Arc::from("missing layout")); + for ws in workspaces { + let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env); + let num_layouts = layouts.len(); + let num_toolchains = toolchains.len(); + let (toolchain, layout) = match ws { + ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. } => { + (toolchain.clone(), target_layout.clone()) + } + ProjectWorkspace::DetachedFiles { .. } => { + (None, Err("detached files have no layout".into())) + } + }; + + let mapping = crate_graph.extend( + other, + &mut crate_proc_macros, + |(cg_id, cg_data), (_o_id, o_data)| { + // if the newly created crate graph's layout is equal to the crate of the merged graph, then + // we can merge the crates. + let id = cg_id.into_raw().into_u32() as usize; + layouts[id] == layout && toolchains[id] == toolchain && cg_data == o_data + }, + ); + // Populate the side tables for the newly merged crates + mapping.values().for_each(|val| { + let idx = val.into_raw().into_u32() as usize; + // we only need to consider crates that were not merged and remapped, as the + // ones that were remapped already have the correct layout and toolchain + if idx >= num_layouts { + if layouts.len() <= idx { + layouts.resize(idx + 1, e.clone()); + } + layouts[idx] = layout.clone(); + } + if idx >= num_toolchains { + if toolchains.len() <= idx { + toolchains.resize(idx + 1, None); + } + toolchains[idx] = toolchain.clone(); + } + }); + proc_macro_paths.push(crate_proc_macros); + } + (crate_graph, proc_macro_paths, layouts, toolchains) +} + pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool { const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"]; const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"]; @@ -683,3 +781,18 @@ pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) } false } + +/// Similar to [`str::eq_ignore_ascii_case`] but instead of ignoring +/// case, we say that `-` and `_` are equal. 
+fn eq_ignore_underscore(s1: &str, s2: &str) -> bool { + if s1.len() != s2.len() { + return false; + } + + s1.as_bytes().iter().zip(s2.as_bytes()).all(|(c1, c2)| { + let c1_underscore = c1 == &b'_' || c1 == &b'-'; + let c2_underscore = c2 == &b'_' || c2 == &b'-'; + + c1 == c2 || (c1_underscore && c2_underscore) + }) +} diff --git a/crates/rust-analyzer/tests/crate_graph.rs b/crates/rust-analyzer/tests/crate_graph.rs new file mode 100644 index 00000000000..efd42fadf7e --- /dev/null +++ b/crates/rust-analyzer/tests/crate_graph.rs @@ -0,0 +1,118 @@ +use std::path::PathBuf; + +use project_model::{CargoWorkspace, ProjectWorkspace, Sysroot, WorkspaceBuildScripts}; +use rust_analyzer::ws_to_crate_graph; +use rustc_hash::FxHashMap; +use serde::de::DeserializeOwned; +use vfs::{AbsPathBuf, FileId}; + +fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace { + let meta = get_test_json_file(file); + let cargo_workspace = CargoWorkspace::new(meta); + ProjectWorkspace::Cargo { + cargo: cargo_workspace, + build_scripts: WorkspaceBuildScripts::default(), + sysroot: Ok(get_fake_sysroot()), + rustc: Err(None), + rustc_cfg: Vec::new(), + cfg_overrides: Default::default(), + toolchain: None, + target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), + } +} + +fn get_test_json_file(file: &str) -> T { + let base = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let file = base.join("tests/test_data").join(file); + let data = std::fs::read_to_string(file).unwrap(); + let mut json = data.parse::().unwrap(); + fixup_paths(&mut json); + return serde_json::from_value(json).unwrap(); + + fn fixup_paths(val: &mut serde_json::Value) { + match val { + serde_json::Value::String(s) => replace_root(s, true), + serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths), + serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths), + serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => { + } + } + } +} + +fn replace_root(s: &mut String, direction: bool) { + if direction { + let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" }; + *s = s.replace("$ROOT$", root) + } else { + let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" }; + *s = s.replace(root, "$ROOT$") + } +} + +fn get_fake_sysroot_path() -> PathBuf { + let base = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + base.join("../project-model/test_data/fake-sysroot") +} + +fn get_fake_sysroot() -> Sysroot { + let sysroot_path = get_fake_sysroot_path(); + // there's no `libexec/` directory with a `proc-macro-srv` binary in that + // fake sysroot, so we give them both the same path: + let sysroot_dir = AbsPathBuf::assert(sysroot_path); + let sysroot_src_dir = sysroot_dir.clone(); + Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) +} + +#[test] +fn test_deduplicate_origin_dev() { + let path_map = &mut FxHashMap::default(); + let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json"); + let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json"); + + let (crate_graph, ..) 
= ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} + +#[test] +fn test_deduplicate_origin_dev_rev() { + let path_map = &mut FxHashMap::default(); + let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json"); + let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json"); + + let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 79ae0c30cfc..960f5b531d4 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -911,20 +911,18 @@ fn root_contains_symlink_out_dirs_check() { #[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn resolve_proc_macro() { use expect_test::expect; + use vfs::AbsPathBuf; if skip_slow_tests() { return; } - // skip using the sysroot config as to prevent us from loading the sysroot sources - let mut rustc = std::process::Command::new(toolchain::rustc()); - rustc.args(["--print", "sysroot"]); - let output = rustc.output().unwrap(); - let sysroot = - vfs::AbsPathBuf::try_from(std::str::from_utf8(&output.stdout).unwrap().trim()).unwrap(); + let sysroot = project_model::Sysroot::discover_no_source( + &AbsPathBuf::assert(std::env::current_dir().unwrap()), + &Default::default(), + ) + .unwrap(); - let standalone_server_name = - format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); - let proc_macro_server_path = sysroot.join("libexec").join(&standalone_server_name); + let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap(); let server = Project::with_fixture( r###" diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index d02cb45b8e3..392a7170207 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -101,8 +101,13 @@ impl Project<'_> { }; }); - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(self.fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse(self.fixture); assert!(proc_macro_names.is_empty()); assert!(mini_core.is_none()); assert!(toolchain.is_none()); diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 3e38fc3ebcd..78da4487d4c 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -9,27 +9,6 @@ 
use xshell::Shell; #[cfg(not(feature = "in-rust-tree"))] use xshell::cmd; -#[cfg(not(feature = "in-rust-tree"))] -#[test] -fn check_code_formatting() { - let sh = &Shell::new().unwrap(); - sh.change_dir(sourcegen::project_root()); - - let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap(); - if !out.contains("stable") { - panic!( - "Failed to run rustfmt from toolchain 'stable'. \ - Please run `rustup component add rustfmt --toolchain stable` to install it.", - ) - } - - let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run(); - if res.is_err() { - let _ = cmd!(sh, "rustup run stable cargo fmt").run(); - } - res.unwrap() -} - #[test] fn check_lsp_extensions_docs() { let sh = &Shell::new().unwrap(); diff --git a/crates/project-model/test_data/deduplication_crate_graph_A.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_A.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json diff --git a/crates/project-model/test_data/deduplication_crate_graph_B.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_B.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json diff --git a/crates/salsa/src/doctest.rs b/crates/salsa/src/doctest.rs deleted file mode 100644 index 29a80663567..00000000000 --- a/crates/salsa/src/doctest.rs +++ /dev/null @@ -1,115 +0,0 @@ -//! -#![allow(dead_code)] - -/// Test that a database with a key/value that is not `Send` will, -/// indeed, not be `Send`. -/// -/// ```compile_fail,E0277 -/// use std::rc::Rc; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Rc; -/// fn no_send_sync_key(&self, key: Rc) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Rc { -/// Rc::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Rc) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// storage: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_send_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will not -/// be `Send`. -/// -/// ```compile_fail,E0277 -/// use std::rc::Rc; -/// use std::cell::Cell; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell; -/// fn no_send_sync_key(&self, key: Cell) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will -/// not be `Sync`. 
-/// -/// ```compile_fail,E0277 -/// use std::cell::Cell; -/// use std::rc::Rc; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell; -/// fn no_send_sync_key(&self, key: Cell) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_sync(_: T) { } -/// -/// fn assert_send() { -/// is_sync(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_sync() {} diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 2d58beafb2a..668dcfd925d 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -11,7 +11,6 @@ //! from previous invocations as appropriate. mod derived; -mod doctest; mod durability; mod hash; mod input; diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index c3010d090c6..c3d8e97c436 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -367,6 +367,7 @@ Expr = | RecordExpr | RefExpr | ReturnExpr +| BecomeExpr | TryExpr | TupleExpr | WhileExpr @@ -528,6 +529,9 @@ MatchGuard = ReturnExpr = Attr* 'return' Expr? +BecomeExpr = + Attr* 'become' Expr + YieldExpr = Attr* 'yield' Expr? @@ -610,7 +614,7 @@ TypeBoundList = TypeBound = Lifetime -| ('?' | '~' 'const')? Type +| ('~' 'const' | 'const')? 'async'? '?'? Type //************************// // Patterns // diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index bc9c54d0b73..41d33c457ce 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -1007,20 +1007,24 @@ impl ast::IdentPat { } pub trait HasVisibilityEdit: ast::HasVisibility { - fn set_visibility(&self, visibility: ast::Visibility) { - match self.visibility() { - Some(current_visibility) => { - ted::replace(current_visibility.syntax(), visibility.syntax()) - } - None => { - let vis_before = self - .syntax() - .children_with_tokens() - .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) - .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); + fn set_visibility(&self, visibility: Option) { + if let Some(visibility) = visibility { + match self.visibility() { + Some(current_visibility) => { + ted::replace(current_visibility.syntax(), visibility.syntax()) + } + None => { + let vis_before = self + .syntax() + .children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); - ted::insert(ted::Position::before(vis_before), visibility.syntax()); + ted::insert(ted::Position::before(vis_before), visibility.syntax()); + } } + } else if let Some(visibility) = self.visibility() { + ted::remove(visibility.syntax()); } } } diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 6c86e591044..75971861aa8 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1095,6 +1095,16 @@ impl ReturnExpr { pub fn expr(&self) -> Option { support::child(&self.syntax) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BecomeExpr { + pub(crate) syntax: SyntaxNode, +} 
+impl ast::HasAttrs for BecomeExpr {} +impl BecomeExpr { + pub fn become_token(&self) -> Option { support::token(&self.syntax, T![become]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TryExpr { pub(crate) syntax: SyntaxNode, @@ -1400,9 +1410,10 @@ pub struct TypeBound { } impl TypeBound { pub fn lifetime(&self) -> Option { support::child(&self.syntax) } - pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn tilde_token(&self) -> Option { support::token(&self.syntax, T![~]) } pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn async_token(&self) -> Option { support::token(&self.syntax, T![async]) } + pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn ty(&self) -> Option { support::child(&self.syntax) } } @@ -1633,6 +1644,7 @@ pub enum Expr { RecordExpr(RecordExpr), RefExpr(RefExpr), ReturnExpr(ReturnExpr), + BecomeExpr(BecomeExpr), TryExpr(TryExpr), TupleExpr(TupleExpr), WhileExpr(WhileExpr), @@ -2792,6 +2804,17 @@ impl AstNode for ReturnExpr { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for BecomeExpr { + fn can_cast(kind: SyntaxKind) -> bool { kind == BECOME_EXPR } + fn cast(syntax: SyntaxNode) -> Option { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for TryExpr { fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR } fn cast(syntax: SyntaxNode) -> Option { @@ -3540,6 +3563,9 @@ impl From for Expr { impl From for Expr { fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) } } +impl From for Expr { + fn from(node: BecomeExpr) -> Expr { Expr::BecomeExpr(node) } +} impl From for Expr { fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } } @@ -3593,6 +3619,7 @@ impl AstNode for Expr { | RECORD_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -3632,6 +3659,7 @@ impl AstNode for Expr { RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }), REF_EXPR => Expr::RefExpr(RefExpr { syntax }), RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }), + BECOME_EXPR => Expr::BecomeExpr(BecomeExpr { syntax }), TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }), WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }), @@ -3673,6 +3701,7 @@ impl AstNode for Expr { Expr::RecordExpr(it) => &it.syntax, Expr::RefExpr(it) => &it.syntax, Expr::ReturnExpr(it) => &it.syntax, + Expr::BecomeExpr(it) => &it.syntax, Expr::TryExpr(it) => &it.syntax, Expr::TupleExpr(it) => &it.syntax, Expr::WhileExpr(it) => &it.syntax, @@ -4150,6 +4179,7 @@ impl AstNode for AnyHasAttrs { | RANGE_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -4851,6 +4881,11 @@ impl std::fmt::Display for ReturnExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for BecomeExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for TryExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 120d801c8d1..02246fc3291 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -1147,7 +1147,7 @@ pub mod tokens { pub(super) static 
SOURCE_FILE: Lazy> = Lazy::new(|| { SourceFile::parse( - "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\n", + "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", ) }); diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index 6e5e4127f4d..1bc1ef8434f 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -569,6 +569,26 @@ impl fmt::Display for NameOrNameRef { } } +impl ast::AstNode for NameOrNameRef { + fn can_cast(kind: SyntaxKind) -> bool { + matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF) + } + fn cast(syntax: SyntaxNode) -> Option { + let res = match syntax.kind() { + SyntaxKind::NAME => NameOrNameRef::Name(ast::Name { syntax }), + SyntaxKind::NAME_REF => NameOrNameRef::NameRef(ast::NameRef { syntax }), + _ => return None, + }; + Some(res) + } + fn syntax(&self) -> &SyntaxNode { + match self { + NameOrNameRef::NameRef(it) => it.syntax(), + NameOrNameRef::Name(it) => it.syntax(), + } + } +} + impl NameOrNameRef { pub fn text(&self) -> TokenText<'_> { match self { diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs index 9ddf5a0a980..9131cd2f179 100644 --- a/crates/syntax/src/ast/prec.rs +++ b/crates/syntax/src/ast/prec.rs @@ -130,8 +130,8 @@ impl Expr { // ContinueExpr(_) => (0, 0), - ClosureExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | BreakExpr(_) - | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0, 1), + ClosureExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | BreakExpr(_) | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0, 1), RangeExpr(_) => (5, 5), @@ -288,6 +288,7 @@ impl Expr { PrefixExpr(e) => e.op_token(), RefExpr(e) => e.amp_token(), ReturnExpr(e) => e.return_token(), + BecomeExpr(e) => e.become_token(), TryExpr(e) => e.question_mark_token(), YieldExpr(e) => e.yield_token(), YeetExpr(e) => e.do_token(), @@ -316,7 +317,8 @@ impl Expr { // For BinExpr and RangeExpr this is technically wrong -- the child can be on the left... BinExpr(_) | RangeExpr(_) | BreakExpr(_) | ContinueExpr(_) | PrefixExpr(_) - | RefExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | LetExpr(_) => self + | RefExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | LetExpr(_) => self .syntax() .parent() .and_then(Expr::cast) diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 960889b7421..b755de86d32 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -27,11 +27,6 @@ extern crate ra_ap_rustc_lexer as rustc_lexer; #[cfg(feature = "in-rust-tree")] extern crate rustc_lexer; -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod parsing; mod ptr; mod syntax_error; diff --git a/crates/syntax/src/tests/ast_src.rs b/crates/syntax/src/tests/ast_src.rs index 341bda892ba..8221c577892 100644 --- a/crates/syntax/src/tests/ast_src.rs +++ b/crates/syntax/src/tests/ast_src.rs @@ -67,8 +67,9 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { keywords: &[ "as", "async", "await", "box", "break", "const", "continue", "crate", "do", "dyn", "else", "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro", - "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct", - "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield", + "match", "mod", "move", "mut", "pub", "ref", "return", "become", "self", "Self", "static", + "struct", "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", + "yield", ], contextual_keywords: &[ "auto", @@ -154,6 +155,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { "BLOCK_EXPR", "STMT_LIST", "RETURN_EXPR", + "BECOME_EXPR", "YIELD_EXPR", "YEET_EXPR", "LET_EXPR", diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs index 28e757e81bb..e118262b4ed 100644 --- a/crates/test-fixture/src/lib.rs +++ b/crates/test-fixture/src/lib.rs @@ -1,10 +1,9 @@ //! A set of high-level utility fixture methods to use in tests. -use std::{mem, ops::Not, str::FromStr, sync}; +use std::{iter, mem, ops::Not, str::FromStr, sync}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, - VfsPath, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, + FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath, }; use cfg::CfgOptions; use hir_expand::{ @@ -118,8 +117,14 @@ impl ChangeFixture { ra_fixture: &str, mut proc_macro_defs: Vec<(String, ProcMacro)>, ) -> ChangeFixture { - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(ra_fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout, + } = FixtureWithProjectMeta::parse(ra_fixture); + let target_data_layout = Ok(target_data_layout.into()); let toolchain = Some({ let channel = toolchain.as_deref().unwrap_or("stable"); Version::parse(&format!("1.76.0-{channel}")).unwrap() @@ -131,7 +136,6 @@ impl ChangeFixture { let mut crates = FxHashMap::default(); let mut crate_deps = Vec::new(); let mut default_crate_root: Option = None; - let mut default_target_data_layout: Option = None; let mut default_cfg = CfgOptions::default(); let mut default_env = Env::new_for_test_fixture(); @@ -187,11 +191,6 @@ impl ChangeFixture { meta.env, false, origin, - meta.target_data_layout - .as_deref() - .map(From::from) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none(), "multiple crates with same name: {}", crate_name); @@ -205,7 +204,6 @@ impl ChangeFixture { default_crate_root = Some(file_id); default_cfg.extend(meta.cfg.into_iter()); default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); - default_target_data_layout = meta.target_data_layout; } source_change.change_file(file_id, Some(text.into())); @@ -228,10 +226,6 @@ impl ChangeFixture { 
default_env, false, CrateOrigin::Local { repo: None, name: None }, - default_target_data_layout - .map(|it| it.into()) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), ); } else { for (from, to, prelude) in crate_deps { @@ -240,20 +234,11 @@ impl ChangeFixture { crate_graph .add_dep( from_id, - Dependency::with_prelude( - CrateName::new(&to).unwrap(), - to_id, - prelude, - DependencyKind::Normal, - ), + Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude), ) .unwrap(); } } - let target_layout = crate_graph.iter().next().map_or_else( - || Err("target_data_layout unset".into()), - |it| crate_graph[it].target_layout.clone(), - ); if let Some(mini_core) = mini_core { let core_file = file_id; @@ -277,20 +262,11 @@ impl ChangeFixture { Env::new_for_test_fixture(), false, CrateOrigin::Lang(LangCrateOrigin::Core), - target_layout.clone(), - toolchain.clone(), ); for krate in all_crates { crate_graph - .add_dep( - krate, - Dependency::new( - CrateName::new("core").unwrap(), - core_crate, - DependencyKind::Normal, - ), - ) + .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate)) .unwrap(); } } @@ -322,8 +298,6 @@ impl ChangeFixture { Env::new_for_test_fixture(), true, CrateOrigin::Local { repo: None, name: None }, - target_layout, - toolchain, ); proc_macros.insert(proc_macros_crate, Ok(proc_macro)); @@ -331,11 +305,7 @@ impl ChangeFixture { crate_graph .add_dep( krate, - Dependency::new( - CrateName::new("proc_macros").unwrap(), - proc_macros_crate, - DependencyKind::Normal, - ), + Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate), ) .unwrap(); } @@ -346,17 +316,20 @@ impl ChangeFixture { SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)), }; roots.push(root); - source_change.set_roots(roots); - source_change.set_crate_graph(crate_graph); - ChangeFixture { - file_position, - files, - change: Change { - source_change, - proc_macros: proc_macros.is_empty().not().then_some(proc_macros), - }, - } + let mut change = Change { + source_change, + proc_macros: proc_macros.is_empty().not().then_some(proc_macros), + toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()), + target_data_layouts: Some( + iter::repeat(target_data_layout).take(crate_graph.len()).collect(), + ), + }; + + change.source_change.set_roots(roots); + change.source_change.set_crate_graph(crate_graph); + + ChangeFixture { file_position, files, change } } } @@ -374,6 +347,7 @@ pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream { name: "identity".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityProcMacroExpander), + disabled: false, }, ), ( @@ -388,6 +362,7 @@ pub fn derive_identity(item: TokenStream) -> TokenStream { name: "DeriveIdentity".into(), kind: ProcMacroKind::CustomDerive, expander: sync::Arc::new(IdentityProcMacroExpander), + disabled: false, }, ), ( @@ -402,6 +377,7 @@ pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream { name: "input_replace".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(AttributeInputReplaceProcMacroExpander), + disabled: false, }, ), ( @@ -416,6 +392,7 @@ pub fn mirror(input: TokenStream) -> TokenStream { name: "mirror".into(), kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(MirrorProcMacroExpander), + disabled: false, }, ), ( @@ -430,6 +407,7 @@ pub fn shorten(input: TokenStream) -> TokenStream { name: "shorten".into(), kind: ProcMacroKind::FuncLike, expander: 
sync::Arc::new(ShortenProcMacroExpander), + disabled: false, }, ), ] @@ -470,7 +448,6 @@ struct FileMeta { edition: Edition, env: Env, introduce_new_source_root: Option, - target_data_layout: Option, } impl FileMeta { @@ -502,7 +479,6 @@ impl FileMeta { edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()), env: f.env.into_iter().collect(), introduce_new_source_root, - target_data_layout: f.target_data_layout, } } } diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs index 595281336d5..7e34c361899 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -126,11 +126,6 @@ pub struct Fixture { /// /// Syntax: `library` pub library: bool, - /// Specifies LLVM data layout to be used. - /// - /// You probably don't want to manually specify this. See LLVM manual for the - /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout - pub target_data_layout: Option, /// Actual file contents. All meta comments are stripped. pub text: String, } @@ -145,6 +140,11 @@ pub struct FixtureWithProjectMeta { pub mini_core: Option, pub proc_macro_names: Vec, pub toolchain: Option, + /// Specifies LLVM data layout to be used. + /// + /// You probably don't want to manually specify this. See LLVM manual for the + /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout + pub target_data_layout: String, } impl FixtureWithProjectMeta { @@ -172,6 +172,8 @@ impl FixtureWithProjectMeta { let fixture = trim_indent(ra_fixture); let mut fixture = fixture.as_str(); let mut toolchain = None; + let mut target_data_layout = + "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(); let mut mini_core = None; let mut res: Vec = Vec::new(); let mut proc_macro_names = vec![]; @@ -182,6 +184,12 @@ impl FixtureWithProjectMeta { fixture = remain; } + if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") { + let (meta, remain) = meta.split_once('\n').unwrap(); + target_data_layout = meta.trim().to_owned(); + fixture = remain; + } + if let Some(meta) = fixture.strip_prefix("//- proc_macros:") { let (meta, remain) = meta.split_once('\n').unwrap(); proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect(); @@ -225,7 +233,7 @@ impl FixtureWithProjectMeta { } } - Self { fixture: res, mini_core, proc_macro_names, toolchain } + Self { fixture: res, mini_core, proc_macro_names, toolchain, target_data_layout } } //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo @@ -245,9 +253,6 @@ impl FixtureWithProjectMeta { let mut env = FxHashMap::default(); let mut introduce_new_source_root = None; let mut library = false; - let mut target_data_layout = Some( - "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(), - ); for component in components { if component == "library" { library = true; @@ -284,7 +289,6 @@ impl FixtureWithProjectMeta { } } "new_source_root" => introduce_new_source_root = Some(value.to_owned()), - "target_data_layout" => target_data_layout = Some(value.to_owned()), _ => panic!("bad component: {component:?}"), } } @@ -307,7 +311,6 @@ impl FixtureWithProjectMeta { env, introduce_new_source_root, library, - target_data_layout, } } } @@ -476,16 +479,21 @@ fn parse_fixture_checks_further_indented_metadata() { #[test] fn parse_fixture_gets_full_meta() { - let FixtureWithProjectMeta { fixture: parsed, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse( - r#" + let 
FixtureWithProjectMeta { + fixture: parsed, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse( + r#" //- toolchain: nightly //- proc_macros: identity //- minicore: coerce_unsized //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo mod m; "#, - ); + ); assert_eq!(toolchain, Some("nightly".to_owned())); assert_eq!(proc_macro_names, vec!["identity".to_owned()]); assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_owned()]); diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 23a3a7e0afa..f125792d125 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -60,6 +60,8 @@ //! try: infallible //! unpin: sized //! unsize: sized +//! todo: panic +//! unimplemented: panic #![rustc_coherence_is_core] @@ -927,6 +929,10 @@ pub mod fmt { use crate::mem::transmute; unsafe { Argument { formatter: transmute(f), value: transmute(x) } } } + + pub fn new_display<'b, T: Display>(x: &'b T) -> Argument<'_> { + Self::new(x, Display::fmt) + } } #[lang = "format_alignment"] @@ -1438,6 +1444,33 @@ mod macros { // endregion:fmt + // region:todo + #[macro_export] + #[allow_internal_unstable(core_panic)] + macro_rules! todo { + () => { + $crate::panicking::panic("not yet implemented") + }; + ($($arg:tt)+) => { + $crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+)) + }; + } + // endregion:todo + + // region:unimplemented + #[macro_export] + #[allow_internal_unstable(core_panic)] + macro_rules! unimplemented { + () => { + $crate::panicking::panic("not implemented") + }; + ($($arg:tt)+) => { + $crate::panic!("not implemented: {}", $crate::format_args!($($arg)+)) + }; + } + // endregion:unimplemented + + // region:derive pub(crate) mod builtin { #[rustc_builtin_macro] diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs index 997f339edc4..ae71b6700c0 100644 --- a/crates/toolchain/src/lib.rs +++ b/crates/toolchain/src/lib.rs @@ -2,7 +2,41 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -use std::{env, iter, path::PathBuf}; +use std::{ + env, iter, + path::{Path, PathBuf}, +}; + +#[derive(Copy, Clone)] +pub enum Tool { + Cargo, + Rustc, + Rustup, + Rustfmt, +} + +impl Tool { + pub fn path(self) -> PathBuf { + get_path_for_executable(self.name()) + } + + pub fn path_in(self, path: &Path) -> Option { + probe_for_binary(path.join(self.name())) + } + + pub fn path_in_or_discover(self, path: &Path) -> PathBuf { + probe_for_binary(path.join(self.name())).unwrap_or_else(|| self.path()) + } + + pub fn name(self) -> &'static str { + match self { + Tool::Cargo => "cargo", + Tool::Rustc => "rustc", + Tool::Rustup => "rustup", + Tool::Rustfmt => "rustfmt", + } + } +} pub fn cargo() -> PathBuf { get_path_for_executable("cargo") @@ -47,7 +81,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { if let Some(mut path) = get_cargo_home() { path.push("bin"); path.push(executable_name); - if let Some(path) = probe(path) { + if let Some(path) = probe_for_binary(path) { return path; } } @@ -57,7 +91,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { fn lookup_in_path(exec: &str) -> bool { let paths = env::var_os("PATH").unwrap_or_default(); - env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some() + env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe_for_binary).is_some() } fn get_cargo_home() -> Option { @@ -73,7 +107,7 @@ fn get_cargo_home() 
-> Option { None } -fn probe(path: PathBuf) -> Option { +pub fn probe_for_binary(path: PathBuf) -> Option { let with_extension = match env::consts::EXE_EXTENSION { "" => None, it => Some(path.with_extension(it)), diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 9004bff53a8..eec88f80688 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -152,6 +152,7 @@ pub struct Punct { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Spacing { Alone, + /// Whether the following token is joint to the current one. Joint, } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index a86ef709411..da7654b0f64 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -71,7 +71,7 @@ cargo check --quiet --workspace --message-format=json --all-targets ``` . -- -[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `false`):: +[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `true`):: + -- Rerun proc-macros building/build-scripts running when proc-macro @@ -234,6 +234,11 @@ each of them, with the working directory being the workspace root by changing `#rust-analyzer.check.invocationStrategy#` and `#rust-analyzer.check.invocationLocation#`. +If `$saved_file` is part of the command, rust-analyzer will pass +the absolute path of the saved file to the provided command. This is +intended to be used with non-Cargo build systems. +Note that `$saved_file` is experimental and may be removed in the futureg. + An example command would be: ```bash @@ -343,6 +348,11 @@ Default: ---- Custom completion snippets. +-- +[[rust-analyzer.completion.termSearch.enable]]rust-analyzer.completion.termSearch.enable (default: `false`):: ++ +-- +Whether to enable term search based snippets like `Some(foo.bar().baz())`. -- [[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`):: + @@ -793,11 +803,6 @@ Exclude imports from find-all-references. -- Exclude tests from find-all-references. -- -[[rust-analyzer.rename.allowExternalItems]]rust-analyzer.rename.allowExternalItems (default: `false`):: -+ --- -Allow renaming of items not belonging to the loaded workspaces. 
--- [[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`):: + -- diff --git a/editors/code/.vscodeignore b/editors/code/.vscodeignore index 09dc27056b3..5c48205694f 100644 --- a/editors/code/.vscodeignore +++ b/editors/code/.vscodeignore @@ -12,3 +12,6 @@ !ra_syntax_tree.tmGrammar.json !server !README.md +!language-configuration-rustdoc.json +!rustdoc-inject.json +!rustdoc.json diff --git a/editors/code/language-configuration-rustdoc.json b/editors/code/language-configuration-rustdoc.json new file mode 100644 index 00000000000..c905d3b6067 --- /dev/null +++ b/editors/code/language-configuration-rustdoc.json @@ -0,0 +1,37 @@ +{ + "comments": { + "blockComment": [""] + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "colorizedBracketPairs": [], + "autoClosingPairs": [ + { "open": "{", "close": "}" }, + { "open": "[", "close": "]" }, + { "open": "(", "close": ")" } + ], + "surroundingPairs": [ + ["(", ")"], + ["[", "]"], + ["`", "`"], + ["_", "_"], + ["*", "*"], + ["{", "}"], + ["'", "'"], + ["\"", "\""] + ], + "folding": { + "offSide": true, + "markers": { + "start": "^\\s*", + "end": "^\\s*" + } + }, + "wordPattern": { + "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", + "flags": "ug" + } +} diff --git a/editors/code/package.json b/editors/code/package.json index b474471e5a4..3a1df5a2f90 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -68,7 +68,9 @@ "typescript": "^5.1.6" }, "activationEvents": [ + "workspaceContains:Cargo.toml", "workspaceContains:*/Cargo.toml", + "workspaceContains:rust-project.json", "workspaceContains:*/rust-project.json" ], "main": "./out/main", @@ -588,7 +590,7 @@ }, "rust-analyzer.cargo.buildScripts.rebuildOnSave": { "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.", - "default": false, + "default": true, "type": "boolean" }, "rust-analyzer.cargo.buildScripts.useRustcWrapper": { @@ -775,7 +777,7 @@ ] }, "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. 
The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the futureg.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", @@ -902,6 +904,11 @@ }, "type": "object" }, + "rust-analyzer.completion.termSearch.enable": { + "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.", + "default": false, + "type": "boolean" + }, "rust-analyzer.diagnostics.disabled": { "markdownDescription": "List of rust-analyzer diagnostics to disable.", "default": [], @@ -1520,11 +1527,6 @@ "default": false, "type": "boolean" }, - "rust-analyzer.rename.allowExternalItems": { - "markdownDescription": "Allow renaming of items not belonging to the loaded workspaces.", - "default": false, - "type": "boolean" - }, "rust-analyzer.runnables.command": { "markdownDescription": "Command to be executed instead of 'cargo' for runnables.", "default": null, @@ -1756,6 +1758,13 @@ "rs" ], "configuration": "language-configuration.json" + }, + { + "id": "rustdoc", + "extensions": [ + ".rustdoc" + ], + "configuration": "./language-configuration-rustdoc.json" } ], "grammars": [ @@ -1763,6 +1772,27 @@ "language": "ra_syntax_tree", "scopeName": "source.ra_syntax_tree", "path": "ra_syntax_tree.tmGrammar.json" + }, + { + "language": "rustdoc", + "scopeName": "text.html.markdown.rustdoc", + "path": "rustdoc.json", + "embeddedLanguages": { + "meta.embedded.block.html": "html", + "meta.embedded.block.markdown": "markdown", + "meta.embedded.block.rust": "rust" + } + }, + { + "injectTo": [ + "source.rust" + ], + "scopeName": "comment.markdown-cell-inject.rustdoc", + "path": "rustdoc-inject.json", + "embeddedLanguages": { + "meta.embedded.block.rustdoc": "rustdoc", + "meta.embedded.block.rust": "rust" + } } ], "problemMatchers": [ diff --git a/editors/code/rustdoc-inject.json b/editors/code/rustdoc-inject.json new file mode 100644 index 00000000000..7a4498fea9d --- /dev/null +++ b/editors/code/rustdoc-inject.json @@ -0,0 +1,93 @@ +{ + "injectionSelector": "L:source.rust -string -comment -meta.embedded.block.rustdoc.md", + "patterns": [ + { + "include": "#triple-slash" + }, + { + "include": "#double-slash-exclamation" + }, + { + "include": "#slash-start-exclamation" + }, + { + "include": "#slash-double-start" + } + ], + "repository": { + "triple-slash": { + "begin": "(^|\\G)\\s*(///) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.triple-slash.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + 
"include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(///) ?" + }, + "double-slash-exclamation": { + "begin": "(^|\\G)\\s*(//!) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.double-slash-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(//!) ?" + }, + "slash-start-exclamation": { + "begin": "(^)(/\\*!) ?$", + "captures": { + "2": { + "name": "comment.block.rust" + } + }, + "name": "comment.quote_code.slash-start-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "end": "( ?)(\\*/)" + }, + "slash-double-start": { + "name": "comment.quote_code.slash-double-start-quote-star.rust", + "begin": "(?:^)\\s*/\\*\\* ?$", + "end": "\\*/", + "patterns": [ + { + "include": "#quote-star" + } + ] + }, + "quote-star": { + "begin": "(^|\\G)\\s*(\\*(?!/)) ?", + "captures": { + "2": { + "name": "comment.punctuation.definition.quote_code.slash-star.MR" + } + }, + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(\\*(?!/)) ?" + } + }, + "scopeName": "comment.markdown-cell-inject.rustdoc" +} diff --git a/editors/code/rustdoc.json b/editors/code/rustdoc.json new file mode 100644 index 00000000000..cecfae9d753 --- /dev/null +++ b/editors/code/rustdoc.json @@ -0,0 +1,82 @@ +{ + "name": "rustdoc", + "patterns": [ + { + "include": "#fenced_code_block" + }, + { + "include": "#markdown" + } + ], + "scopeName": "text.html.markdown.rustdoc", + "repository": { + "markdown": { + "patterns": [ + { + "include": "text.html.markdown" + } + ] + }, + "fenced_code_block": { + "patterns": [ + { + "include": "#fenced_code_block_rust" + }, + { + "include": "#fenced_code_block_unknown" + } + ] + }, + "fenced_code_block_rust": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?i:(rust|not run|not_run)?((\\s+|:|,|\\{|\\?)[^`~]*)?$)", + "name": "markup.fenced_code.block.markdown", + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language.markdown" + }, + "5": { + "name": "fenced_code.block.language.attributes.markdown" + } + }, + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "patterns": [ + { + "begin": "(^|\\G)(\\s*)(.*)", + "while": "(^|\\G)(?!\\s*([`~]{3,})\\s*$)", + "contentName": "meta.embedded.block.rust", + "patterns": [ + { + "include": "source.rust" + } + ] + } + ] + }, + "fenced_code_block_unknown": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?=([^`~]+)?$)", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language" + } + }, + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "name": "markup.fenced_code.block.markdown" + } + } +} diff --git a/editors/code/src/rust_project.ts b/editors/code/src/rust_project.ts index bf65ad43ba5..c983874fc00 100644 --- a/editors/code/src/rust_project.ts +++ b/editors/code/src/rust_project.ts @@ -1,7 +1,26 @@ export interface JsonProject { + /// Path to the sysroot directory. + /// + /// The sysroot is where rustc looks for the + /// crates that are built-in to rust, such as + /// std. 
+ /// + /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root + /// + /// To see the current value of sysroot, you + /// can query rustc: + /// + /// ``` + /// $ rustc --print sysroot + /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin + /// ``` + sysroot?: string; /// Path to the directory with *source code* of /// sysroot crates. /// + /// By default, this is `lib/rustlib/src/rust/library` + /// relative to the sysroot. + /// /// It should point to the directory where std, /// core, and friends can be found: /// diff --git a/lib/lsp-server/LICENSE-APACHE b/lib/lsp-server/LICENSE-APACHE new file mode 120000 index 00000000000..1cd601d0a3a --- /dev/null +++ b/lib/lsp-server/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/lib/lsp-server/LICENSE-MIT b/lib/lsp-server/LICENSE-MIT new file mode 120000 index 00000000000..b2cfbdc7b0b --- /dev/null +++ b/lib/lsp-server/LICENSE-MIT @@ -0,0 +1 @@ +../../LICENSE-MIT \ No newline at end of file diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 9bd3a661c24..2efafa10a82 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -117,8 +117,6 @@ impl Metrics { sh, "./target/release/rust-analyzer -q analysis-stats {path} --query-sysroot-metadata" ) - // the sysroot uses `public-dependency`, so we make cargo think it's a nightly - .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") .read()?; for (metric, value, unit) in parse_metrics(&output) { self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into()); From 591356738d397dc1d1f8a7b815ac374ede4a5cb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 19 Feb 2024 08:32:33 +0200 Subject: [PATCH 25/92] Downgrade actions/checkout in release workflow --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index adb1c850516..88adc3bc757 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,7 +59,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v3 with: fetch-depth: ${{ env.FETCH_DEPTH }} From 8ca834c86b6fc2da539d079d62c5e6baf26f2ca3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 19 Feb 2024 08:55:37 +0200 Subject: [PATCH 26/92] Also downgrade actions/setup-node --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 88adc3bc757..147f516db9d 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -78,7 +78,7 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v3 with: node-version: 18 From 8544e729f1f0de1a35f1d7b87e9ec4ab8c669af2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 19 Feb 2024 09:25:25 +0200 Subject: [PATCH 27/92] Also downgrade node to 16 --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 147f516db9d..ac536d0fdde 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -80,7 +80,7 @@ jobs: - name: Install Node.js uses: actions/setup-node@v3 with: - node-version: 18 + node-version: 16 - name: Update apt repositories if: 
matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' From af174b9428fd4a80971b7bfc213f108facf62740 Mon Sep 17 00:00:00 2001 From: Young-Flash Date: Mon, 19 Feb 2024 18:09:33 +0800 Subject: [PATCH 28/92] internal: checkout repo before run typos --- .github/workflows/ci.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 62fbd57abc1..5a8b18e3fe1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -226,6 +226,11 @@ jobs: - name: download typos run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: check for typos run: typos From f3d84e86c8e5490e2ac96c0fcd0de179d9f4aa22 Mon Sep 17 00:00:00 2001 From: Young-Flash Date: Mon, 19 Feb 2024 18:12:08 +0800 Subject: [PATCH 29/92] minor: fix typo --- crates/hir/src/term_search/tactics.rs | 6 +++--- crates/ide-completion/src/item.rs | 2 +- crates/rust-analyzer/tests/slow-tests/support.rs | 2 +- crates/salsa/src/lib.rs | 4 ++-- crates/salsa/src/lru.rs | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index 666d63ac155..edbf75affe6 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -281,14 +281,14 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>( if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { return None; } - let fileds = it.fields(db); + let fields = it.fields(db); // Check if all fields are visible, otherwise we cannot fill them - if fileds.iter().any(|it| !it.is_visible_from(db, module)) { + if fields.iter().any(|it| !it.is_visible_from(db, module)) { return None; } // Early exit if some param cannot be filled from lookup - let param_exprs: Vec> = fileds + let param_exprs: Vec> = fields .into_iter() .map(|field| lookup.find(db, &field.ty(db))) .collect::>()?; diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index c2c0641961a..4bab2886851 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -308,7 +308,7 @@ impl CompletionRelevance { // When a fn is bumped due to return type: // Bump Constructor or Builder methods with no arguments, - // over them tha with self arguments + // over them than with self arguments if fn_score > 0 { if !asf.has_params { // bump associated functions diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index 392a7170207..dfd25abc70f 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -243,7 +243,7 @@ impl Server { to_string_pretty(actual_part).unwrap(), ); } else { - tracing::debug!("sucessfully matched notification"); + tracing::debug!("successfully matched notification"); return; } } else { diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 668dcfd925d..48b5d633bd6 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -456,12 +456,12 @@ pub trait Query: Debug + Default + Sized + for<'d> QueryDb<'d> { /// Name of the query method (e.g., `foo`) const QUERY_NAME: &'static str; - /// Extact storage for this query from the storage for its group. 
+ /// Extract storage for this query from the storage for its group. fn query_storage<'a>( group_storage: &'a >::GroupStorage, ) -> &'a std::sync::Arc; - /// Extact storage for this query from the storage for its group. + /// Extract storage for this query from the storage for its group. fn query_storage_mut<'a>( group_storage: &'a >::GroupStorage, ) -> &'a std::sync::Arc; diff --git a/crates/salsa/src/lru.rs b/crates/salsa/src/lru.rs index c6b9778f20a..1ff85a3ea45 100644 --- a/crates/salsa/src/lru.rs +++ b/crates/salsa/src/lru.rs @@ -40,7 +40,7 @@ pub(crate) trait LruNode: Sized + Debug { #[derive(Debug)] pub(crate) struct LruIndex { - /// Index in the approprate LRU list, or std::usize::MAX if not a + /// Index in the appropriate LRU list, or std::usize::MAX if not a /// member. index: AtomicUsize, } From 30b992e95a1e437b3e96b0e86373427f0fe2b121 Mon Sep 17 00:00:00 2001 From: Johannes Altmanninger Date: Sun, 14 Jan 2024 08:48:41 +0100 Subject: [PATCH 30/92] Deduplicate references to macro argument Commit 6a06f6f72 (Deduplicate reference search results, 2022-11-07) deduplicates references within each definition. There is an edge case when requesting references of a macro argument. Apparently, our descend_into_macros() stanza in references.rs produces a cartesian product of - references inside the macro times - times references outside the macro. Since the above deduplication only applies to the references within a single definition, we return them all, leading to many redundant references. Work around this by deduplicating definitions as well. Perhaps there is a better fix to not produce this cartesian product in the first place; but I think at least for definitions the problem would remain; a macro can contain multiple definitions of the same name, but since the navigation target will be the unresolved location, it's the same for all of them. We can't use unique() because we don't want to drop references that don't have a declaration (though I dont' have an example for this case). I discovered this working with the "bitflags" macro from the crate of the same name. Fixes #16357 --- crates/rust-analyzer/src/handlers/request.rs | 28 ++++++++++++++++---- crates/stdx/src/lib.rs | 16 +++++++++++ 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index eb9d4bf0f02..a677cea31b5 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -2,6 +2,7 @@ //! Protocol. This module specifically handles requests. 
use std::{ + collections::HashSet, fs, io::Write as _, path::PathBuf, @@ -13,7 +14,8 @@ use anyhow::Context; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit, - ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, + ReferenceCategory, ReferenceSearchResult, Runnable, RunnableKind, SingleResolve, SourceChange, + TextEdit, }; use ide_db::SymbolKind; use lsp_server::ErrorCode; @@ -28,6 +30,8 @@ use lsp_types::{ }; use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; +#[allow(unused_imports)] +use stdx::IsNoneOr; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; use triomphe::Arc; @@ -1055,10 +1059,10 @@ pub(crate) fn handle_references( let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_tests = snap.config.find_all_refs_exclude_tests(); - let refs = match snap.analysis.find_all_refs(position, None)? { - None => return Ok(None), - Some(refs) => refs, + let Some(mut refs) = snap.analysis.find_all_refs(position, None)? else { + return Ok(None); }; + deduplicate_declarations(&mut refs); let include_declaration = params.context.include_declaration; let locations = refs @@ -1090,6 +1094,17 @@ pub(crate) fn handle_references( Ok(Some(locations)) } +fn deduplicate_declarations(refs: &mut Vec) { + if refs.iter().filter(|decl| decl.declaration.is_some()).take(2).count() > 1 { + let mut seen_navigation_targets = HashSet::new(); + refs.retain(|res| { + res.declaration + .as_ref() + .is_none_or(|decl| seen_navigation_targets.insert(decl.nav.clone())) + }); + } +} + pub(crate) fn handle_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentFormattingParams, @@ -1794,7 +1809,10 @@ fn show_ref_command_link( position: &FilePosition, ) -> Option { if snap.config.hover_actions().references && snap.config.client_commands().show_reference { - if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) { + if let Some(mut ref_search_res) = + snap.analysis.find_all_refs(*position, None).unwrap_or(None) + { + deduplicate_declarations(&mut ref_search_res); let uri = to_proto::url(snap, position.file_id); let line_index = snap.file_line_index(position.file_id).ok()?; let position = to_proto::position(&line_index, position.offset); diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index 9a9ebae74e8..0504ca50b88 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -302,6 +302,22 @@ pub fn slice_tails(this: &[T]) -> impl Iterator { (0..this.len()).map(|i| &this[i..]) } +pub trait IsNoneOr { + type Type; + #[allow(clippy::wrong_self_convention)] + fn is_none_or(self, s: impl FnOnce(Self::Type) -> bool) -> bool; +} +#[allow(unstable_name_collisions)] +impl IsNoneOr for Option { + type Type = T; + fn is_none_or(self, f: impl FnOnce(T) -> bool) -> bool { + match self { + Some(v) => f(v), + None => true, + } + } +} + #[cfg(test)] mod tests { use super::*; From 91a8f34aeed075427ad4f6c0c6f58f247ac7de42 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 12:22:27 +0100 Subject: [PATCH 31/92] Deduplicate lsp locations --- crates/rust-analyzer/src/handlers/request.rs | 33 +++++--------------- crates/rust-analyzer/src/lsp/to_proto.rs | 7 +++-- 2 files changed, 12 insertions(+), 28 deletions(-) diff --git a/crates/rust-analyzer/src/handlers/request.rs 
b/crates/rust-analyzer/src/handlers/request.rs index a677cea31b5..04a04395429 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -2,7 +2,6 @@ //! Protocol. This module specifically handles requests. use std::{ - collections::HashSet, fs, io::Write as _, path::PathBuf, @@ -14,10 +13,10 @@ use anyhow::Context; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit, - ReferenceCategory, ReferenceSearchResult, Runnable, RunnableKind, SingleResolve, SourceChange, - TextEdit, + ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; use ide_db::SymbolKind; +use itertools::Itertools; use lsp_server::ErrorCode; use lsp_types::{ CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, @@ -30,8 +29,6 @@ use lsp_types::{ }; use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; -#[allow(unused_imports)] -use stdx::IsNoneOr; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; use triomphe::Arc; @@ -1059,10 +1056,9 @@ pub(crate) fn handle_references( let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_tests = snap.config.find_all_refs_exclude_tests(); - let Some(mut refs) = snap.analysis.find_all_refs(position, None)? else { + let Some(refs) = snap.analysis.find_all_refs(position, None)? else { return Ok(None); }; - deduplicate_declarations(&mut refs); let include_declaration = params.context.include_declaration; let locations = refs @@ -1088,23 +1084,13 @@ pub(crate) fn handle_references( }) .chain(decl) }) + .unique() .filter_map(|frange| to_proto::location(&snap, frange).ok()) .collect(); Ok(Some(locations)) } -fn deduplicate_declarations(refs: &mut Vec) { - if refs.iter().filter(|decl| decl.declaration.is_some()).take(2).count() > 1 { - let mut seen_navigation_targets = HashSet::new(); - refs.retain(|res| { - res.declaration - .as_ref() - .is_none_or(|decl| seen_navigation_targets.insert(decl.nav.clone())) - }); - } -} - pub(crate) fn handle_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentFormattingParams, @@ -1809,10 +1795,7 @@ fn show_ref_command_link( position: &FilePosition, ) -> Option { if snap.config.hover_actions().references && snap.config.client_commands().show_reference { - if let Some(mut ref_search_res) = - snap.analysis.find_all_refs(*position, None).unwrap_or(None) - { - deduplicate_declarations(&mut ref_search_res); + if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) { let uri = to_proto::url(snap, position.file_id); let line_index = snap.file_line_index(position.file_id).ok()?; let position = to_proto::position(&line_index, position.offset); @@ -1820,10 +1803,10 @@ fn show_ref_command_link( .into_iter() .flat_map(|res| res.references) .flat_map(|(file_id, ranges)| { - ranges.into_iter().filter_map(move |(range, _)| { - to_proto::location(snap, FileRange { file_id, range }).ok() - }) + ranges.into_iter().map(move |(range, _)| FileRange { file_id, range }) }) + .unique() + .filter_map(|range| to_proto::location(snap, range).ok()) .collect(); let title = to_proto::reference_title(locations.len()); let command = to_proto::command::show_references(title, &uri, position, locations); diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 
727007bba08..4101d476cd3 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -904,15 +904,16 @@ pub(crate) fn goto_definition_response( if snap.config.location_link() { let links = targets .into_iter() + .unique_by(|nav| (nav.file_id, nav.full_range, nav.focus_range)) .map(|nav| location_link(snap, src, nav)) .collect::>>()?; Ok(links.into()) } else { let locations = targets .into_iter() - .map(|nav| { - location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) - }) + .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .unique() + .map(|range| location(snap, range)) .collect::>>()?; Ok(locations.into()) } From 6e16edb3bfc641b2a9a17b2fa66da05fc3d368e8 Mon Sep 17 00:00:00 2001 From: UserIsntAvailable Date: Sun, 4 Feb 2024 08:17:02 -0500 Subject: [PATCH 32/92] feat: append `as ` when renaming inside an "UseTree". test: include `rename_path_inside_use_tree`. Keeps tracks the progress of the changes. 3 other tests broke with the changes of this. feat: rename all other usages within the current file. feat: fix most of the implementation problems. test: `rename_path_inside_use_tree` tests a more complicated scenario. --- crates/ide/src/rename.rs | 120 ++++++++++++++++++++++++++++++++++----- 1 file changed, 106 insertions(+), 14 deletions(-) diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index f2eedfa4316..c128fa5f41a 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -9,6 +9,7 @@ use ide_db::{ base_db::{FileId, FileRange}, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, + source_change::SourceChangeBuilder, RootDatabase, }; use itertools::Itertools; @@ -90,24 +91,60 @@ pub(crate) fn rename( let syntax = source_file.syntax(); let defs = find_definitions(&sema, syntax, position)?; + let alias_fallback = alias_fallback(syntax, position, new_name); - let ops: RenameResult> = defs - .map(|(.., def)| { - if let Definition::Local(local) = def { - if let Some(self_param) = local.as_self_param(sema.db) { - cov_mark::hit!(rename_self_to_param); - return rename_self_to_param(&sema, local, self_param, new_name); + let ops: RenameResult> = match alias_fallback { + Some(_) => defs + // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can + // properly find "direct" usages/references. + .map(|(.., def)| { + match IdentifierKind::classify(new_name)? { + IdentifierKind::Ident => (), + IdentifierKind::Lifetime => { + bail!("Cannot alias reference to a lifetime identifier") + } + IdentifierKind::Underscore => bail!("Cannot alias reference to `_`"), + }; + + let mut usages = def.usages(&sema).all(); + + // FIXME: hack - removes the usage that triggered this rename operation. 
+ match usages.references.get_mut(&position.file_id).and_then(|refs| { + refs.iter() + .position(|ref_| ref_.range.contains_inclusive(position.offset)) + .map(|idx| refs.remove(idx)) + }) { + Some(_) => (), + None => never!(), + }; + + let mut source_change = SourceChange::default(); + source_change.extend(usages.iter().map(|(&file_id, refs)| { + (file_id, source_edit_from_references(refs, def, new_name)) + })); + + Ok(source_change) + }) + .collect(), + None => defs + .map(|(.., def)| { + if let Definition::Local(local) = def { + if let Some(self_param) = local.as_self_param(sema.db) { + cov_mark::hit!(rename_self_to_param); + return rename_self_to_param(&sema, local, self_param, new_name); + } + if new_name == "self" { + cov_mark::hit!(rename_to_self); + return rename_to_self(&sema, local); + } } - if new_name == "self" { - cov_mark::hit!(rename_to_self); - return rename_to_self(&sema, local); - } - } - def.rename(&sema, new_name) - }) - .collect(); + def.rename(&sema, new_name, rename_external) + }) + .collect(), + }; ops?.into_iter() + .chain(alias_fallback) .reduce(|acc, elem| acc.merge(elem)) .ok_or_else(|| format_err!("No references found at position")) } @@ -130,6 +167,38 @@ pub(crate) fn will_rename_file( Some(change) } +// FIXME: Should support `extern crate`. +fn alias_fallback( + syntax: &SyntaxNode, + FilePosition { file_id, offset }: FilePosition, + new_name: &str, +) -> Option { + let use_tree = syntax + .token_at_offset(offset) + .flat_map(|syntax| syntax.parent_ancestors()) + .find_map(ast::UseTree::cast)?; + + let last_path_segment = use_tree.path()?.segments().last()?.name_ref()?; + if !last_path_segment.syntax().text_range().contains_inclusive(offset) { + return None; + }; + + let mut builder = SourceChangeBuilder::new(file_id); + + match use_tree.rename() { + Some(rename) => { + let offset = rename.syntax().text_range(); + builder.replace(offset, format!("as {new_name}")); + } + None => { + let offset = use_tree.syntax().text_range().end(); + builder.insert(offset, format!(" as {new_name}")); + } + } + + Some(builder.finish()) +} + fn find_definitions( sema: &Semantics<'_, RootDatabase>, syntax: &SyntaxNode, @@ -2686,4 +2755,27 @@ fn test() { "#, ); } + + #[test] + fn rename_path_inside_use_tree() { + check( + "Baz", + r#" +mod foo { pub struct Foo; } +mod bar { use super::Foo; } + +use foo::Foo$0; + +fn main() { let _: Foo; } +"#, + r#" +mod foo { pub struct Foo; } +mod bar { use super::Baz; } + +use foo::Foo as Baz; + +fn main() { let _: Baz; } +"#, + ) + } } From 69c25327f48aa6e0297bff6249fb027c7b836eea Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Sun, 14 Jan 2024 15:19:20 +0000 Subject: [PATCH 33/92] internal: reduce body lookups in expr diagnostics --- crates/hir-ty/src/diagnostics/expr.rs | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index c4329a7b82b..afb80e1f445 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -57,7 +57,8 @@ impl BodyValidationDiagnostic { let _p = tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered(); let infer = db.infer(owner); - let mut validator = ExprValidator::new(owner, infer); + let body = db.body(owner); + let mut validator = ExprValidator { owner, body, infer, diagnostics: Vec::new() }; validator.validate_body(db); validator.diagnostics } @@ -65,18 +66,16 @@ impl BodyValidationDiagnostic { struct ExprValidator { owner: DefWithBodyId, 
+ body: Arc, infer: Arc, pub(super) diagnostics: Vec, } impl ExprValidator { - fn new(owner: DefWithBodyId, infer: Arc) -> ExprValidator { - ExprValidator { owner, infer, diagnostics: Vec::new() } - } - fn validate_body(&mut self, db: &dyn HirDatabase) { - let body = db.body(self.owner); let mut filter_map_next_checker = None; + // we'll pass &mut self while iterating over body.exprs, so they need to be disjoint + let body = Arc::clone(&self.body); if matches!(self.owner, DefWithBodyId::FunctionId(_)) { self.check_for_trailing_return(body.body_expr, &body); @@ -162,8 +161,6 @@ impl ExprValidator { arms: &[MatchArm], db: &dyn HirDatabase, ) { - let body = db.body(self.owner); - let scrut_ty = &self.infer[scrutinee_expr]; if scrut_ty.is_unknown() { return; @@ -191,12 +188,12 @@ impl ExprValidator { .as_reference() .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty) .unwrap_or(false)) - && types_of_subpatterns_do_match(arm.pat, &body, &self.infer) + && types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer) { // If we had a NotUsefulMatchArm diagnostic, we could // check the usefulness of each pattern as we added it // to the matrix here. - let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors); + let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors); let m_arm = pat_analysis::MatchArm { pat: pattern_arena.alloc(pat), has_guard: arm.guard.is_some(), @@ -244,10 +241,9 @@ impl ExprValidator { cx: &MatchCheckCtx<'p>, pat: PatId, db: &dyn HirDatabase, - body: &Body, have_errors: &mut bool, ) -> DeconstructedPat<'p> { - let mut patcx = match_check::PatCtxt::new(db, &self.infer, body); + let mut patcx = match_check::PatCtxt::new(db, &self.infer, &self.body); let pattern = patcx.lower_pattern(pat); let pattern = cx.lower_pat(&pattern); if !patcx.errors.is_empty() { From 5390e4ce9bdb822ea4899e2df4383a7076d820cf Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Fri, 5 Jan 2024 17:38:29 +0000 Subject: [PATCH 34/92] feat: add non-exhaustive-let diagnostic --- crates/hir-ty/src/diagnostics/expr.rs | 60 +++++++++++++++++-- crates/hir/src/diagnostics.rs | 23 +++++++ .../src/handlers/mutability_errors.rs | 2 +- .../src/handlers/non_exhaustive_let.rs | 47 +++++++++++++++ crates/ide-diagnostics/src/lib.rs | 2 + 5 files changed, 129 insertions(+), 5 deletions(-) create mode 100644 crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index afb80e1f445..0c5d6399619 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -12,6 +12,7 @@ use hir_expand::name; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint}; +use tracing::debug; use triomphe::Arc; use typed_arena::Arena; @@ -44,6 +45,10 @@ pub enum BodyValidationDiagnostic { match_expr: ExprId, uncovered_patterns: String, }, + NonExhaustiveLet { + pat: PatId, + uncovered_patterns: String, + }, RemoveTrailingReturn { return_expr: ExprId, }, @@ -68,7 +73,7 @@ struct ExprValidator { owner: DefWithBodyId, body: Arc, infer: Arc, - pub(super) diagnostics: Vec, + diagnostics: Vec, } impl ExprValidator { @@ -105,6 +110,9 @@ impl ExprValidator { Expr::If { .. } => { self.check_for_unnecessary_else(id, expr, &body); } + Expr::Block { .. 
} => { + self.validate_block(db, expr); + } _ => {} } } @@ -231,11 +239,55 @@ impl ExprValidator { if !witnesses.is_empty() { self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms { match_expr, - uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, arms), + uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, m_arms.is_empty()), }); } } + fn validate_block(&mut self, db: &dyn HirDatabase, expr: &Expr) { + let Expr::Block { statements, .. } = expr else { return }; + let pattern_arena = Arena::new(); + let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); + for stmt in &**statements { + let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else { + continue; + }; + let Some(initializer) = initializer else { continue }; + let ty = &self.infer[initializer]; + + let mut have_errors = false; + let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors); + let match_arm = rustc_pattern_analysis::MatchArm { + pat: pattern_arena.alloc(deconstructed_pat), + has_guard: false, + arm_data: (), + }; + if have_errors { + continue; + } + + let report = match compute_match_usefulness( + &cx, + &[match_arm], + ty.clone(), + ValidityConstraint::ValidOnly, + ) { + Ok(v) => v, + Err(e) => { + debug!(?e, "match usefulness error"); + continue; + } + }; + let witnesses = report.non_exhaustiveness_witnesses; + if !witnesses.is_empty() { + self.diagnostics.push(BodyValidationDiagnostic::NonExhaustiveLet { + pat, + uncovered_patterns: missing_match_arms(&cx, ty, witnesses, false), + }); + } + } + } + fn lower_pattern<'p>( &self, cx: &MatchCheckCtx<'p>, @@ -444,7 +496,7 @@ fn missing_match_arms<'p>( cx: &MatchCheckCtx<'p>, scrut_ty: &Ty, witnesses: Vec>, - arms: &[MatchArm], + arms_is_empty: bool, ) -> String { struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>); impl fmt::Display for DisplayWitness<'_, '_> { @@ -459,7 +511,7 @@ fn missing_match_arms<'p>( Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(), _ => false, }; - if arms.is_empty() && !non_empty_enum { + if arms_is_empty && !non_empty_enum { format!("type `{}` is non-empty", scrut_ty.display(cx.db)) } else { let pat_display = |witness| DisplayWitness(witness, cx); diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 08843a6c999..d351e257d2e 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -64,6 +64,7 @@ diagnostics![ MissingUnsafe, MovedOutOfRef, NeedMut, + NonExhaustiveLet, NoSuchField, PrivateAssocItem, PrivateField, @@ -280,6 +281,12 @@ pub struct MissingMatchArms { pub uncovered_patterns: String, } +#[derive(Debug)] +pub struct NonExhaustiveLet { + pub pat: InFile>, + pub uncovered_patterns: String, +} + #[derive(Debug)] pub struct TypeMismatch { pub expr_or_pat: InFile>>, @@ -456,6 +463,22 @@ impl AnyDiagnostic { Err(SyntheticSyntax) => (), } } + BodyValidationDiagnostic::NonExhaustiveLet { pat, uncovered_patterns } => { + match source_map.pat_syntax(pat) { + Ok(source_ptr) => { + if let Some(ast_pat) = source_ptr.value.cast::() { + return Some( + NonExhaustiveLet { + pat: InFile::new(source_ptr.file_id, ast_pat), + uncovered_patterns, + } + .into(), + ); + } + } + Err(SyntheticSyntax) => {} + } + } BodyValidationDiagnostic::RemoveTrailingReturn { return_expr } => { if let Ok(source_ptr) = source_map.expr_syntax(return_expr) { // Filters out desugared return expressions (e.g. desugared try operators). 
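(A minimal sketch of what the new non-exhaustive-let diagnostic reports, paraphrased from the test fixtures this patch adds below; the function names here are illustrative only and the snippets assume `Option` from the test prelude:)

// Flagged by the new non-exhaustive-let diagnostic (E0005): the pattern
// `None` does not cover `Some(_)` and the `let` has no `else` branch.
fn flagged() {
    let None = Some(5);
}

// Not flagged: the or-pattern covers every variant of `Option<i32>`,
// so the binding is exhaustive.
fn not_flagged() {
    let Some(_) | None = Some(5);
}
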
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index bdb55a9d98a..3c71f84dc48 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -817,7 +817,7 @@ fn f() { //- minicore: option fn f(_: i32) {} fn main() { - let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)); + let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)) else { return }; //^^^^^ 💡 warn: variable does not need to be mutable f(x); } diff --git a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs new file mode 100644 index 00000000000..1a4d2877ef2 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs @@ -0,0 +1,47 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: non-exhaustive-let +// +// This diagnostic is triggered if a `let` statement without an `else` branch has a non-exhaustive +// pattern. +pub(crate) fn non_exhaustive_let( + ctx: &DiagnosticsContext<'_>, + d: &hir::NonExhaustiveLet, +) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0005"), + format!("non-exhaustive pattern: {}", d.uncovered_patterns), + d.pat.map(Into::into), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn option_nonexhaustive() { + check_diagnostics( + r#" +//- minicore: option +fn main() { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered +} +"#, + ); + } + + #[test] + fn option_exhaustive() { + check_diagnostics( + r#" +//- minicore: option +fn main() { + let Some(_) | None = Some(5); +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 9d21bb4cd9f..3a3888011d7 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -41,6 +41,7 @@ mod handlers { pub(crate) mod moved_out_of_ref; pub(crate) mod mutability_errors; pub(crate) mod no_such_field; + pub(crate) mod non_exhaustive_let; pub(crate) mod private_assoc_item; pub(crate) mod private_field; pub(crate) mod remove_trailing_return; @@ -359,6 +360,7 @@ pub fn diagnostics( AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), AnyDiagnostic::MovedOutOfRef(d) => handlers::moved_out_of_ref::moved_out_of_ref(&ctx, &d), AnyDiagnostic::NeedMut(d) => handlers::mutability_errors::need_mut(&ctx, &d), + AnyDiagnostic::NonExhaustiveLet(d) => handlers::non_exhaustive_let::non_exhaustive_let(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d), AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d), From 6dd5dc10ef2a7e305045657a9b71662a53884f9f Mon Sep 17 00:00:00 2001 From: UserIsntAvailable Date: Mon, 5 Feb 2024 13:26:47 -0500 Subject: [PATCH 35/92] test: fix `disallow_renaming_for_non_local_definition` to follow PR changes. 
--- crates/ide/src/rename.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index c128fa5f41a..f78153df38b 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -138,7 +138,7 @@ pub(crate) fn rename( return rename_to_self(&sema, local); } } - def.rename(&sema, new_name, rename_external) + def.rename(&sema, new_name) }) .collect(), }; @@ -2695,7 +2695,8 @@ use qux as frob; //- /lib.rs crate:lib new_source_root:library pub struct S; //- /main.rs crate:main deps:lib new_source_root:local -use lib::S$0; +use lib::S; +fn main() { let _: S$0; } "#, "error: Cannot rename a non-local definition", ); From e9c80a9c256677d85398d93880eec1e54e226d2a Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 15 Feb 2024 00:59:13 +0900 Subject: [PATCH 36/92] fix: False positive diagnostic for necessary `else` --- crates/hir-ty/src/diagnostics/expr.rs | 8 ++++++- .../src/handlers/remove_unnecessary_else.rs | 23 +++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 0c5d6399619..571f01dde2e 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -338,7 +338,13 @@ impl ExprValidator { fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, body: &Body) { if let Expr::If { condition: _, then_branch, else_branch } = expr { - if else_branch.is_none() { + if let Some(else_branch) = else_branch { + // If else branch has a tail, it is an "expression" that produces a value, + // e.g. `let a = if { ... } else { ... };` and this `else` is not unnecessary + if let Expr::Block { tail: Some(_), .. } = body.exprs[*else_branch] { + return; + } + } else { return; } if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index ae8241ec2c6..813c07a505d 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -384,6 +384,29 @@ fn test() { return bar; } } +"#, + ); + } + + #[test] + fn no_diagnostic_if_tail_exists_in_else_branch() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test1(a: bool) { + let _x = if a { + return; + } else { + 1 + }; +} + +fn test2(a: bool) -> i32 { + if a { + return 1; + } else { + 0 + } +} "#, ); } From d14b22863bd58e78a8e9193db987ae22ba1e57e1 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 15 Feb 2024 01:29:48 +0900 Subject: [PATCH 37/92] Handle cases for `else if` --- crates/hir-ty/src/diagnostics/expr.rs | 19 +++++++++++++++++-- .../src/handlers/remove_unnecessary_else.rs | 12 ++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 571f01dde2e..ff70618ca12 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -341,8 +341,23 @@ impl ExprValidator { if let Some(else_branch) = else_branch { // If else branch has a tail, it is an "expression" that produces a value, // e.g. `let a = if { ... } else { ... };` and this `else` is not unnecessary - if let Expr::Block { tail: Some(_), .. } = body.exprs[*else_branch] { - return; + let mut branch = *else_branch; + loop { + match body.exprs[branch] { + Expr::Block { tail: Some(_), .. 
} => return, + Expr::If { then_branch, else_branch, .. } => { + if let Expr::Block { tail: Some(_), .. } = body.exprs[then_branch] { + return; + } + if let Some(else_branch) = else_branch { + // Continue checking for branches like `if { ... } else if { ... } else...` + branch = else_branch; + continue; + } + } + _ => break, + } + break; } } else { return; diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 813c07a505d..bbc10e96cef 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -407,6 +407,18 @@ fn test2(a: bool) -> i32 { 0 } } + +fn test3(a: bool, b: bool, c: bool) { + let _x = if a { + return; + } else if b { + return; + } else if c { + 1 + } else { + return; + }; +} "#, ); } From 21f4ff03516eebb3cbdd8947ad3a7a00c980a692 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Fri, 16 Feb 2024 23:53:00 +0900 Subject: [PATCH 38/92] Check for let expr ancestors instead of tail expr --- crates/hir-ty/src/diagnostics/expr.rs | 52 ++++++++++--------- .../src/handlers/remove_unnecessary_else.rs | 10 +--- 2 files changed, 29 insertions(+), 33 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index ff70618ca12..718409e1599 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -12,6 +12,7 @@ use hir_expand::name; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint}; +use syntax::{ast, AstNode}; use tracing::debug; use triomphe::Arc; use typed_arena::Arena; @@ -108,7 +109,7 @@ impl ExprValidator { self.check_for_trailing_return(*body_expr, &body); } Expr::If { .. } => { - self.check_for_unnecessary_else(id, expr, &body); + self.check_for_unnecessary_else(id, expr, db); } Expr::Block { .. } => { self.validate_block(db, expr); @@ -336,32 +337,35 @@ impl ExprValidator { } } - fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, body: &Body) { + fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) { if let Expr::If { condition: _, then_branch, else_branch } = expr { - if let Some(else_branch) = else_branch { - // If else branch has a tail, it is an "expression" that produces a value, - // e.g. `let a = if { ... } else { ... };` and this `else` is not unnecessary - let mut branch = *else_branch; - loop { - match body.exprs[branch] { - Expr::Block { tail: Some(_), .. } => return, - Expr::If { then_branch, else_branch, .. } => { - if let Expr::Block { tail: Some(_), .. } = body.exprs[then_branch] { - return; - } - if let Some(else_branch) = else_branch { - // Continue checking for branches like `if { ... } else if { ... 
} else...` - branch = else_branch; - continue; - } - } - _ => break, - } - break; - } - } else { + if else_branch.is_none() { return; } + let (body, source_map) = db.body_with_source_map(self.owner); + let Ok(source_ptr) = source_map.expr_syntax(id) else { + return; + }; + let root = source_ptr.file_syntax(db.upcast()); + let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else { + return; + }; + let mut top_if_expr = if_expr; + loop { + let parent = top_if_expr.syntax().parent(); + let has_parent_let_stmt = + parent.as_ref().map_or(false, |node| ast::LetStmt::can_cast(node.kind())); + if has_parent_let_stmt { + // Bail if parent or direct ancestor is a let stmt. + return; + } + let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { + // Parent is neither an if expr nor a let stmt. + break; + }; + // Check parent if expr. + top_if_expr = parent_if_expr; + } if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { let last_then_expr = tail.or_else(|| match statements.last()? { Statement::Expr { expr, .. } => Some(*expr), diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index bbc10e96cef..351f728747e 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -400,15 +400,7 @@ fn test1(a: bool) { }; } -fn test2(a: bool) -> i32 { - if a { - return 1; - } else { - 0 - } -} - -fn test3(a: bool, b: bool, c: bool) { +fn test2(a: bool, b: bool, c: bool) { let _x = if a { return; } else if b { From 8f6e2127c3a1537298cbda11a8f261875edd6659 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Fri, 16 Feb 2024 23:54:01 +0900 Subject: [PATCH 39/92] Fix the remove unnecessary else action to preserve block tail expr --- .../src/handlers/remove_unnecessary_else.rs | 43 +++++++++++++++++-- 1 file changed, 40 insertions(+), 3 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 351f728747e..289ce640354 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -41,9 +41,11 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option { - block.statements().map(|stmt| format!("\n{indent}{stmt}")).join("") - } + ast::ElseBranch::Block(ref block) => block + .statements() + .map(|stmt| format!("\n{indent}{stmt}")) + .chain(block.tail_expr().map(|tail| format!("\n{indent}{tail}"))) + .join(""), ast::ElseBranch::IfExpr(ref nested_if_expr) => { format!("\n{indent}{nested_if_expr}") } @@ -171,6 +173,41 @@ fn test() { ); } + #[test] + fn remove_unnecessary_else_for_return3() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test(a: bool) -> i32 { + if a { + return 1; + } else { + //^^^^ 💡 weak: remove unnecessary else block + 0 + } +} +"#, + ); + check_fix( + r#" +fn test(a: bool) -> i32 { + if a { + return 1; + } else$0 { + 0 + } +} +"#, + r#" +fn test(a: bool) -> i32 { + if a { + return 1; + } + 0 +} +"#, + ); + } + #[test] fn remove_unnecessary_else_for_return_in_child_if_expr() { check_diagnostics_with_needless_return_disabled( From 1205853c3689a69e81578dfd066b17e3ebe376cf Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sat, 17 Feb 2024 00:55:45 +0900 Subject: [PATCH 40/92] Apply indent fix in #16575 --- .../src/handlers/remove_unnecessary_else.rs | 47 
+++++++++++++++++-- 1 file changed, 44 insertions(+), 3 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 289ce640354..9564807a334 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -2,7 +2,10 @@ use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse, HirFileIdExt}; use ide_db::{assists::Assist, source_change::SourceChange}; use itertools::Itertools; use syntax::{ - ast::{self, edit::IndentLevel}, + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + }, AstNode, SyntaxToken, TextRange, }; use text_edit::TextEdit; @@ -41,12 +44,15 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option block + ast::ElseBranch::Block(block) => block .statements() .map(|stmt| format!("\n{indent}{stmt}")) .chain(block.tail_expr().map(|tail| format!("\n{indent}{tail}"))) .join(""), - ast::ElseBranch::IfExpr(ref nested_if_expr) => { + ast::ElseBranch::IfExpr(mut nested_if_expr) => { + if has_parent_if_expr { + nested_if_expr = nested_if_expr.indent(IndentLevel(1)) + } format!("\n{indent}{nested_if_expr}") } }; @@ -251,6 +257,41 @@ fn test() { ); } + #[test] + fn remove_unnecessary_else_for_return_in_child_if_expr2() { + check_fix( + r#" +fn test() { + if foo { + do_something(); + } else if qux { + return bar; + } else$0 if quux { + do_something_else(); + } else { + do_something_else2(); + } +} +"#, + r#" +fn test() { + if foo { + do_something(); + } else { + if qux { + return bar; + } + if quux { + do_something_else(); + } else { + do_something_else2(); + } + } +} +"#, + ); + } + #[test] fn remove_unnecessary_else_for_break() { check_diagnostics( From ff7031008651021c330b93d4bd502810022b045d Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Fri, 16 Feb 2024 20:39:52 +0300 Subject: [PATCH 41/92] fix: only emit "unnecessary else" diagnostic for expr stmts --- crates/hir-ty/src/diagnostics/expr.rs | 64 +++++++++++-------- .../src/handlers/remove_unnecessary_else.rs | 14 +++- 2 files changed, 49 insertions(+), 29 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 718409e1599..4fe75f24b80 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -109,7 +109,7 @@ impl ExprValidator { self.check_for_trailing_return(*body_expr, &body); } Expr::If { .. } => { - self.check_for_unnecessary_else(id, expr, db); + self.check_for_unnecessary_else(id, expr, &body, db); } Expr::Block { .. 
} => { self.validate_block(db, expr); @@ -337,35 +337,17 @@ impl ExprValidator { } } - fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) { + fn check_for_unnecessary_else( + &mut self, + id: ExprId, + expr: &Expr, + body: &Body, + db: &dyn HirDatabase, + ) { if let Expr::If { condition: _, then_branch, else_branch } = expr { if else_branch.is_none() { return; } - let (body, source_map) = db.body_with_source_map(self.owner); - let Ok(source_ptr) = source_map.expr_syntax(id) else { - return; - }; - let root = source_ptr.file_syntax(db.upcast()); - let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else { - return; - }; - let mut top_if_expr = if_expr; - loop { - let parent = top_if_expr.syntax().parent(); - let has_parent_let_stmt = - parent.as_ref().map_or(false, |node| ast::LetStmt::can_cast(node.kind())); - if has_parent_let_stmt { - // Bail if parent or direct ancestor is a let stmt. - return; - } - let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { - // Parent is neither an if expr nor a let stmt. - break; - }; - // Check parent if expr. - top_if_expr = parent_if_expr; - } if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { let last_then_expr = tail.or_else(|| match statements.last()? { Statement::Expr { expr, .. } => Some(*expr), @@ -374,6 +356,36 @@ impl ExprValidator { if let Some(last_then_expr) = last_then_expr { let last_then_expr_ty = &self.infer[last_then_expr]; if last_then_expr_ty.is_never() { + // Only look at sources if the then branch diverges and we have an else branch. + let (_, source_map) = db.body_with_source_map(self.owner); + let Ok(source_ptr) = source_map.expr_syntax(id) else { + return; + }; + let root = source_ptr.file_syntax(db.upcast()); + let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else { + return; + }; + let mut top_if_expr = if_expr; + loop { + let parent = top_if_expr.syntax().parent(); + let has_parent_expr_stmt_or_stmt_list = + parent.as_ref().map_or(false, |node| { + ast::ExprStmt::can_cast(node.kind()) + | ast::StmtList::can_cast(node.kind()) + }); + if has_parent_expr_stmt_or_stmt_list { + // Only emit diagnostic if parent or direct ancestor is either + // an expr stmt or a stmt list. + break; + } + let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { + // Bail if parent is neither an if expr, an expr stmt nor a stmt list. + return; + }; + // Check parent if expr. 
+ top_if_expr = parent_if_expr; + } + self.diagnostics .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id }) } diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 9564807a334..7bfd64596ed 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -467,10 +467,10 @@ fn test() { } #[test] - fn no_diagnostic_if_tail_exists_in_else_branch() { + fn no_diagnostic_if_not_expr_stmt() { check_diagnostics_with_needless_return_disabled( r#" -fn test1(a: bool) { +fn test1() { let _x = if a { return; } else { @@ -478,7 +478,7 @@ fn test1(a: bool) { }; } -fn test2(a: bool, b: bool, c: bool) { +fn test2() { let _x = if a { return; } else if b { @@ -491,5 +491,13 @@ fn test2(a: bool, b: bool, c: bool) { } "#, ); + check_diagnostics_with_disabled( + r#" +fn test3() { + foo(if a { return 1 } else { 0 }) +} +"#, + std::iter::once("E0308".to_owned()), + ); } } From 7dfeb2cdcc2a899f929bb3da1c2db7fe6725fb47 Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Mon, 19 Feb 2024 14:43:43 +0300 Subject: [PATCH 42/92] refactor "unnecessary else" diagnostic test --- .../ide-diagnostics/src/handlers/remove_unnecessary_else.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 7bfd64596ed..9c63d79d910 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -491,13 +491,12 @@ fn test2() { } "#, ); - check_diagnostics_with_disabled( + check_diagnostics( r#" -fn test3() { +fn test3() -> u8 { foo(if a { return 1 } else { 0 }) } "#, - std::iter::once("E0308".to_owned()), ); } } From f2218e727840ec0286d89ecc758322674e1efb6d Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Mon, 19 Feb 2024 15:35:47 +0300 Subject: [PATCH 43/92] refactor: remove body parameter for "unnecessary else" diagnostic --- crates/hir-ty/src/diagnostics/expr.rs | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 4fe75f24b80..6c8a1875165 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -109,7 +109,7 @@ impl ExprValidator { self.check_for_trailing_return(*body_expr, &body); } Expr::If { .. } => { - self.check_for_unnecessary_else(id, expr, &body, db); + self.check_for_unnecessary_else(id, expr, db); } Expr::Block { .. } => { self.validate_block(db, expr); @@ -337,18 +337,12 @@ impl ExprValidator { } } - fn check_for_unnecessary_else( - &mut self, - id: ExprId, - expr: &Expr, - body: &Body, - db: &dyn HirDatabase, - ) { + fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) { if let Expr::If { condition: _, then_branch, else_branch } = expr { if else_branch.is_none() { return; } - if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { + if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] { let last_then_expr = tail.or_else(|| match statements.last()? { Statement::Expr { expr, .. 
} => Some(*expr), _ => None, From d818b531c98d5361310e43127e05f5fe02d88013 Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Fri, 16 Feb 2024 14:54:58 +0000 Subject: [PATCH 44/92] internal: make check_diagnostics_with_disabled more ergonomic --- .../src/handlers/incorrect_case.rs | 2 +- .../src/handlers/mutability_errors.rs | 4 ++-- .../src/handlers/remove_trailing_return.rs | 2 +- .../src/handlers/remove_unnecessary_else.rs | 16 ++++++++-------- .../src/handlers/type_mismatch.rs | 2 +- crates/ide-diagnostics/src/tests.rs | 7 ++----- 6 files changed, 15 insertions(+), 18 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index 5e2541795ca..db28928a24e 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -512,7 +512,7 @@ impl BAD_TRAIT for () { fn BadFunction() {} } "#, - std::iter::once("unused_variables".to_owned()), + &["unused_variables"], ); } diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 3c71f84dc48..91f1058d65b 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -448,7 +448,7 @@ fn main(b: bool) { &mut x; } "#, - std::iter::once("remove-unnecessary-else".to_owned()), + &["remove-unnecessary-else"], ); check_diagnostics_with_disabled( r#" @@ -463,7 +463,7 @@ fn main(b: bool) { &mut x; } "#, - std::iter::once("remove-unnecessary-else".to_owned()), + &["remove-unnecessary-else"], ); } diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index b7667dc318f..7a040e46e33 100644 --- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -140,7 +140,7 @@ fn foo(x: usize) -> u8 { } //^^^^^^^^^ 💡 weak: replace return ; with } "#, - std::iter::once("remove-unnecessary-else".to_owned()), + &["remove-unnecessary-else"], ); } diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 9c63d79d910..8310af0f524 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -97,13 +97,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option i32 { #[test] fn remove_unnecessary_else_for_return_in_child_if_expr() { - check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -229,6 +227,7 @@ fn test() { } } "#, + &["needless_return"], ); check_fix( r#" @@ -453,7 +452,7 @@ fn test() { #[test] fn no_diagnostic_if_no_divergence_in_else_branch() { - check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -463,6 +462,7 @@ fn test() { } } "#, + &["needless_return"], ); } diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 8c97281b783..4c255322280 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -730,7 +730,7 @@ fn f() -> i32 { } fn g() { return; } "#, - std::iter::once("needless_return".to_owned()), + &["needless_return"], ); } diff --git 
a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 4e4a851f67e..9e134620ee3 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -198,12 +198,9 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) { } #[track_caller] -pub(crate) fn check_diagnostics_with_disabled( - ra_fixture: &str, - disabled: impl Iterator, -) { +pub(crate) fn check_diagnostics_with_disabled(ra_fixture: &str, disabled: &[&str]) { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.extend(disabled); + config.disabled.extend(disabled.into_iter().map(|&s| s.to_owned())); check_diagnostics_with_config(config, ra_fixture) } From a492d9d16489e2801fc0933f969cf3d3a2f88206 Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Sun, 11 Feb 2024 15:34:52 +0000 Subject: [PATCH 45/92] feat: add unresolved-ident diagnostic --- crates/hir-ty/src/infer.rs | 3 ++ crates/hir-ty/src/infer/expr.rs | 14 +++++- crates/hir/src/diagnostics.rs | 10 ++++ .../src/handlers/inactive_code.rs | 1 + .../src/handlers/missing_fields.rs | 3 +- .../src/handlers/missing_match_arms.rs | 16 ++++--- .../src/handlers/remove_unnecessary_else.rs | 25 ++++++---- .../src/handlers/undeclared_label.rs | 8 ++-- .../src/handlers/unresolved_field.rs | 7 ++- .../src/handlers/unresolved_ident.rs | 46 +++++++++++++++++++ .../src/handlers/unresolved_method.rs | 4 +- crates/ide-diagnostics/src/lib.rs | 2 + 12 files changed, 113 insertions(+), 26 deletions(-) create mode 100644 crates/ide-diagnostics/src/handlers/unresolved_ident.rs diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 1977f00517c..9cea414e1a0 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -221,6 +221,9 @@ pub enum InferenceDiagnostic { UnresolvedAssocItem { id: ExprOrPatId, }, + UnresolvedIdent { + expr: ExprId, + }, // FIXME: This should be emitted in body lowering BreakOutsideOfLoop { expr: ExprId, diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 428ed6748c6..c377a51e7d3 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -13,7 +13,7 @@ use hir_def::{ ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp, }, lang_item::{LangItem, LangItemTarget}, - path::{GenericArg, GenericArgs}, + path::{GenericArg, GenericArgs, Path}, BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, TupleFieldId, TupleId, }; use hir_expand::name::{name, Name}; @@ -439,7 +439,17 @@ impl InferenceContext<'_> { } Expr::Path(p) => { let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); - let ty = self.infer_path(p, tgt_expr.into()).unwrap_or_else(|| self.err_ty()); + let ty = match self.infer_path(p, tgt_expr.into()) { + Some(ty) => ty, + None => { + if matches!(p, Path::Normal { mod_path, .. 
} if mod_path.is_ident()) { + self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { + expr: tgt_expr, + }); + } + self.err_ty() + } + }; self.resolver.reset_to_guard(g); ty } diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index d351e257d2e..80cd0c9c794 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -87,6 +87,7 @@ diagnostics![ UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule, + UnresolvedIdent, UnresolvedProcMacro, UnusedMut, UnusedVariable, @@ -242,6 +243,11 @@ pub struct UnresolvedAssocItem { pub expr_or_pat: InFile>>>, } +#[derive(Debug)] +pub struct UnresolvedIdent { + pub expr: InFile>, +} + #[derive(Debug)] pub struct PrivateField { pub expr: InFile>, @@ -588,6 +594,10 @@ impl AnyDiagnostic { }; UnresolvedAssocItem { expr_or_pat }.into() } + &InferenceDiagnostic::UnresolvedIdent { expr } => { + let expr = expr_syntax(expr); + UnresolvedIdent { expr }.into() + } &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => { let expr = expr_syntax(expr); BreakOutsideOfLoop { expr, is_break, bad_value_break }.into() diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 7db5ea04fbd..785a42352bf 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -60,6 +60,7 @@ fn f() { #[cfg(a)] let x = 0; // let statement //^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled + fn abc() {} abc(#[cfg(a)] 0); //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled let x = Struct { diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index c70f39eb286..09daefd084d 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -634,7 +634,8 @@ struct TestStruct { one: i32, two: i64 } fn test_fn() { let one = 1; - let s = TestStruct{ ..a }; + let a = TestStruct{ one, two: 2 }; + let _ = TestStruct{ ..a }; } "#, ); diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 7632fdf1d09..8596f5792e0 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -18,7 +18,9 @@ pub(crate) fn missing_match_arms( #[cfg(test)] mod tests { use crate::{ - tests::{check_diagnostics, check_diagnostics_with_config}, + tests::{ + check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, + }, DiagnosticsConfig, }; @@ -282,7 +284,7 @@ fn main() { cov_mark::check_count!(validate_match_bailed_out, 4); // Match statements with arms that don't match the // expression pattern do not fire this diagnostic. 
- check_diagnostics( + check_diagnostics_with_disabled( r#" enum Either { A, B } enum Either2 { C, D } @@ -307,6 +309,7 @@ fn main() { match Unresolved::Bar { Unresolved::Baz => () } } "#, + &["E0425"], ); } @@ -397,11 +400,11 @@ fn main() { match loop {} { Either::A => (), } - match loop { break Foo::A } { - //^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered + match loop { break Either::A } { + //^^^^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered Either::A => (), } - match loop { break Foo::A } { + match loop { break Either::A } { Either::A => (), Either::B => (), } @@ -977,7 +980,7 @@ fn f(ty: Enum) { #[test] fn unexpected_ty_fndef() { cov_mark::check!(validate_match_bailed_out); - check_diagnostics( + check_diagnostics_with_disabled( r" enum Exp { Tuple(()), @@ -987,6 +990,7 @@ fn f() { Exp::Tuple => {} } }", + &["E0425"], ); } diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 8310af0f524..d5095b75469 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -95,7 +95,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option ! { loop {} } "#, + &["E0425"], ); check_fix( r#" @@ -422,7 +425,7 @@ fn never() -> ! { #[test] fn no_diagnostic_if_no_else_branch() { - check_diagnostics( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -432,12 +435,13 @@ fn test() { do_something_else(); } "#, + &["E0425"], ); } #[test] fn no_diagnostic_if_no_divergence() { - check_diagnostics( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -447,6 +451,7 @@ fn test() { } } "#, + &["E0425"], ); } @@ -462,7 +467,7 @@ fn test() { } } "#, - &["needless_return"], + &["needless_return", "E0425"], ); } diff --git a/crates/ide-diagnostics/src/handlers/undeclared_label.rs b/crates/ide-diagnostics/src/handlers/undeclared_label.rs index a6a0fdc655f..97943b7e8b3 100644 --- a/crates/ide-diagnostics/src/handlers/undeclared_label.rs +++ b/crates/ide-diagnostics/src/handlers/undeclared_label.rs @@ -38,10 +38,12 @@ fn foo() { fn while_let_loop_with_label_in_condition() { check_diagnostics( r#" +//- minicore: option + fn foo() { let mut optional = Some(0); - 'my_label: while let Some(a) = match optional { + 'my_label: while let Some(_) = match optional { None => break 'my_label, Some(val) => Some(val), } { @@ -59,8 +61,8 @@ fn foo() { r#" //- minicore: iterator fn foo() { - 'xxx: for _ in unknown { - 'yyy: for _ in unknown { + 'xxx: for _ in [] { + 'yyy: for _ in [] { break 'xxx; continue 'yyy; break 'zzz; diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 65abfd8a294..4c01a2d155a 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -78,7 +78,9 @@ fn method_fix( #[cfg(test)] mod tests { use crate::{ - tests::{check_diagnostics, check_diagnostics_with_config}, + tests::{ + check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, + }, DiagnosticsConfig, }; @@ -148,7 +150,7 @@ fn foo() { #[test] fn no_diagnostic_on_unknown() { - check_diagnostics( + check_diagnostics_with_disabled( r#" fn foo() { x.foo; @@ -156,6 +158,7 @@ fn foo() { (&((x,),),).foo; } "#, + &["E0425"], ); } diff --git a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs 
b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs new file mode 100644 index 00000000000..295c8a2c615 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs @@ -0,0 +1,46 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: unresolved-ident +// +// This diagnostic is triggered if an expr-position ident is invalid. +pub(crate) fn unresolved_ident( + ctx: &DiagnosticsContext<'_>, + d: &hir::UnresolvedIdent, +) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0425"), + "no such value in this scope", + d.expr.map(Into::into), + ) + .experimental() +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn missing() { + check_diagnostics( + r#" +fn main() { + let _ = x; + //^ error: no such value in this scope +} +"#, + ); + } + + #[test] + fn present() { + check_diagnostics( + r#" +fn main() { + let x = 5; + let _ = x; +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 648d081898c..0614fdc5514 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -335,8 +335,8 @@ fn main() { r#" struct Foo { bar: i32 } fn foo() { - Foo { bar: i32 }.bar(); - // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists + Foo { bar: 0 }.bar(); + // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists } "#, ); diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 3a3888011d7..4428b8baafb 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -59,6 +59,7 @@ mod handlers { pub(crate) mod unresolved_assoc_item; pub(crate) mod unresolved_extern_crate; pub(crate) mod unresolved_field; + pub(crate) mod unresolved_ident; pub(crate) mod unresolved_import; pub(crate) mod unresolved_macro_call; pub(crate) mod unresolved_method; @@ -377,6 +378,7 @@ pub fn diagnostics( AnyDiagnostic::UnresolvedAssocItem(d) => handlers::unresolved_assoc_item::unresolved_assoc_item(&ctx, &d), AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d), AnyDiagnostic::UnresolvedField(d) => handlers::unresolved_field::unresolved_field(&ctx, &d), + AnyDiagnostic::UnresolvedIdent(d) => handlers::unresolved_ident::unresolved_ident(&ctx, &d), AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d), AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d), AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d), From 1e448f84c3a05350af700de7903083572bba34fc Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 13:53:29 +0100 Subject: [PATCH 46/92] Clippy --- .../src/handlers/remove_unnecessary_else.rs | 9 ++++++--- crates/ide-diagnostics/src/tests.rs | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index d5095b75469..47844876dc5 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -179,7 +179,7 @@ fn test() { #[test] fn remove_unnecessary_else_for_return3() { - 
check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test(a: bool) -> i32 { if a { @@ -190,6 +190,7 @@ fn test(a: bool) -> i32 { } } "#, + &["needless_return", "E0425"], ); check_fix( r#" @@ -473,7 +474,7 @@ fn test() { #[test] fn no_diagnostic_if_not_expr_stmt() { - check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test1() { let _x = if a { @@ -495,13 +496,15 @@ fn test2() { }; } "#, + &["needless_return", "E0425"], ); - check_diagnostics( + check_diagnostics_with_disabled( r#" fn test3() -> u8 { foo(if a { return 1 } else { 0 }) } "#, + &["E0425"], ); } } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 9e134620ee3..901ceffbb26 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -200,7 +200,7 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) { #[track_caller] pub(crate) fn check_diagnostics_with_disabled(ra_fixture: &str, disabled: &[&str]) { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.extend(disabled.into_iter().map(|&s| s.to_owned())); + config.disabled.extend(disabled.iter().map(|&s| s.to_owned())); check_diagnostics_with_config(config, ra_fixture) } From d2b27d09ea075b36bac166e7ac029742510e8662 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 16:46:09 +0100 Subject: [PATCH 47/92] Don't populate rust_ir::AdtVariantDatum::fields for now due to perf --- crates/hir-ty/src/chalk_db.rs | 3 +++ crates/hir-ty/src/tests/coercion.rs | 4 +++- crates/hir-ty/src/tests/traits.rs | 12 ++++++------ 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 49393f05a1a..40a195f7d95 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -742,6 +742,8 @@ pub(crate) fn adt_datum_query( phantom_data, }; + #[cfg(FALSE)] + // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it let variant_id_to_fields = |id: VariantId| { let variant_data = &id.variant_data(db.upcast()); let fields = if variant_data.fields().is_empty() { @@ -757,6 +759,7 @@ pub(crate) fn adt_datum_query( }; rust_ir::AdtVariantDatum { fields } }; + let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] }; let (kind, variants) = match adt_id { hir_def::AdtId::StructId(id) => { diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index bfb8df61a33..d56b15b9b74 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -536,7 +536,7 @@ fn test() { #[test] fn coerce_unsize_generic() { - check_no_mismatches( + check( r#" //- minicore: coerce_unsized struct Foo { t: T }; @@ -544,7 +544,9 @@ struct Bar(Foo); fn test() { let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] }; + //^^^^^^^^^^^^^^^^^^^^^ expected &Foo<[usize]>, got &Foo<[i32; 3]> let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); + //^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &Bar<[usize]>, got &Bar<[i32; 3]> } "#, ); diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 68cd6071ec7..879c69c758f 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -4583,21 +4583,21 @@ fn f() { Struct::::IS_SEND; //^^^^^^^^^^^^^^^^^^^^Yes Struct::::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^^Yes Struct::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + 
//^^^^^^^^^^^^^^^^^^^^^^^^^^^Yes Enum::::IS_SEND; //^^^^^^^^^^^^^^^^^^Yes Enum::::IS_SEND; - //^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^Yes Enum::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^^^^^^^Yes Union::::IS_SEND; //^^^^^^^^^^^^^^^^^^^Yes Union::::IS_SEND; - //^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^Yes Union::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^^^^^^^^Yes PhantomData::::IS_SEND; //^^^^^^^^^^^^^^^^^^^^^^^^^Yes PhantomData::::IS_SEND; From a822291a025f495aacef9201807fce77971e8097 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 18:00:50 +0100 Subject: [PATCH 48/92] Infallible definition hovers --- crates/ide-db/src/defs.rs | 13 ++-- crates/ide/src/hover.rs | 37 ++++++------ crates/ide/src/hover/render.rs | 14 ++--- crates/ide/src/hover/tests.rs | 105 +++++++++++++++++++++++++++++++-- crates/ide/src/static_index.rs | 4 +- 5 files changed, 135 insertions(+), 38 deletions(-) diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index d95d94ec72e..747c90561de 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -213,8 +213,8 @@ impl Definition { }) } - pub fn label(&self, db: &RootDatabase) -> Option { - let label = match *self { + pub fn label(&self, db: &RootDatabase) -> String { + match *self { Definition::Macro(it) => it.display(db).to_string(), Definition::Field(it) => it.display(db).to_string(), Definition::TupleField(it) => it.display(db).to_string(), @@ -241,7 +241,11 @@ impl Definition { } } Definition::SelfType(impl_def) => { - impl_def.self_ty(db).as_adt().and_then(|adt| Definition::Adt(adt).label(db))? + let self_ty = &impl_def.self_ty(db); + match self_ty.as_adt() { + Some(it) => it.display(db).to_string(), + None => self_ty.display(db).to_string(), + } } Definition::GenericParam(it) => it.display(db).to_string(), Definition::Label(it) => it.name(db).display(db).to_string(), @@ -249,8 +253,7 @@ impl Definition { Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db)), Definition::ToolModule(it) => it.name(db).to_string(), Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)), - }; - Some(label) + } } } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 19b181ae3b6..4a7350feb38 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -147,7 +147,7 @@ fn hover_simple( if let Some(doc_comment) = token_as_doc_comment(&original_token) { cov_mark::hit!(no_highlight_on_comment_hover); return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| { - let res = hover_for_definition(sema, file_id, def, &node, config)?; + let res = hover_for_definition(sema, file_id, def, &node, config); Some(RangeInfo::new(range, res)) }); } @@ -161,7 +161,7 @@ fn hover_simple( Definition::from(resolution?), &original_token.parent()?, config, - )?; + ); return Some(RangeInfo::new(range, res)); } @@ -215,7 +215,7 @@ fn hover_simple( }) .flatten() .unique_by(|&(def, _)| def) - .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) + .map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { acc.actions.extend(actions); acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup)); @@ -373,9 +373,9 @@ pub(crate) fn hover_for_definition( def: Definition, scope_node: &SyntaxNode, config: &HoverConfig, -) -> Option { +) -> HoverResult { let famous_defs 
= match &def { - Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), + Definition::BuiltinType(_) => sema.scope(scope_node).map(|it| FamousDefs(sema, it.krate())), _ => None, }; @@ -396,20 +396,19 @@ pub(crate) fn hover_for_definition( }; let notable_traits = def_ty.map(|ty| notable_traits(db, &ty)).unwrap_or_default(); - render::definition(sema.db, def, famous_defs.as_ref(), ¬able_traits, config).map(|markup| { - HoverResult { - markup: render::process_markup(sema.db, def, &markup, config), - actions: [ - show_implementations_action(sema.db, def), - show_fn_references_action(sema.db, def), - runnable_action(sema, def, file_id), - goto_type_action_for_def(sema.db, def, ¬able_traits), - ] - .into_iter() - .flatten() - .collect(), - } - }) + let markup = render::definition(sema.db, def, famous_defs.as_ref(), ¬able_traits, config); + HoverResult { + markup: render::process_markup(sema.db, def, &markup, config), + actions: [ + show_implementations_action(sema.db, def), + show_fn_references_action(sema.db, def), + runnable_action(sema, def, file_id), + goto_type_action_for_def(sema.db, def, ¬able_traits), + ] + .into_iter() + .flatten() + .collect(), + } } fn notable_traits( diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index eff055c9599..42342d94b6d 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -264,7 +264,7 @@ pub(super) fn keyword( let markup = process_markup( sema.db, Definition::Module(doc_owner), - &markup(Some(docs.into()), description, None)?, + &markup(Some(docs.into()), description, None), config, ); Some(HoverResult { markup, actions }) @@ -396,11 +396,11 @@ pub(super) fn definition( famous_defs: Option<&FamousDefs<'_, '_>>, notable_traits: &[(Trait, Vec<(Option, Name)>)], config: &HoverConfig, -) -> Option { +) -> Markup { let mod_path = definition_mod_path(db, &def); - let label = def.label(db)?; + let label = def.label(db); let docs = def.docs(db, famous_defs); - let value = match def { + let value = (|| match def { Definition::Variant(it) => { if !it.parent_enum(db).is_data_carrying(db) { match it.eval(db) { @@ -436,7 +436,7 @@ pub(super) fn definition( Some(body.to_string()) } _ => None, - }; + })(); let layout_info = match def { Definition::Field(it) => render_memory_layout( @@ -683,7 +683,7 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option { def.module(db).map(|module| path(db, module, definition_owner_name(db, def))) } -fn markup(docs: Option, desc: String, mod_path: Option) -> Option { +fn markup(docs: Option, desc: String, mod_path: Option) -> Markup { let mut buf = String::new(); if let Some(mod_path) = mod_path { @@ -696,7 +696,7 @@ fn markup(docs: Option, desc: String, mod_path: Option) -> Optio if let Some(doc) = docs { format_to!(buf, "\n___\n\n{}", doc); } - Some(buf.into()) + buf.into() } fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option { diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 69ddc1e45ef..157f8ff371e 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -1279,11 +1279,11 @@ impl Thing { ); check( r#" - enum Thing { A } - impl Thing { - pub fn thing(a: Self$0) {} - } - "#, +enum Thing { A } +impl Thing { + pub fn thing(a: Self$0) {} +} +"#, expect![[r#" *Self* @@ -1298,6 +1298,42 @@ impl Thing { ``` "#]], ); + check( + r#" +impl usize { + pub fn thing(a: Self$0) {} +} +"#, + expect![[r#" + *Self* + + ```rust + test + ``` + + ```rust + usize + ``` + 
"#]], + ); + check( + r#" +impl fn() -> usize { + pub fn thing(a: Self$0) {} +} +"#, + expect![[r#" + *Self* + + ```rust + test + ``` + + ```rust + fn() -> usize + ``` + "#]], + ); } #[test] @@ -7201,6 +7237,65 @@ impl Iterator for S { ); } +#[test] +fn extern_items() { + check( + r#" +extern "C" { + static STATIC$0: (); +} +"#, + expect![[r#" + *STATIC* + + ```rust + test + ``` + + ```rust + static STATIC: () + ``` + "#]], + ); + check( + r#" +extern "C" { + fn fun$0(); +} +"#, + expect![[r#" + *fun* + + ```rust + test + ``` + + ```rust + unsafe fn fun() + ``` + "#]], + ); + check( + r#" +extern "C" { + type Ty$0; +} +"#, + expect![[r#" + *Ty* + + ```rust + test + ``` + + ```rust + // size = 0, align = 1 + type Ty + ``` + "#]], + ); +} + #[test] fn notable_ranged() { check_hover_range( diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 5feaf21aa97..2929a7522e5 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -186,7 +186,7 @@ impl StaticIndex<'_> { } else { let it = self.tokens.insert(TokenStaticData { documentation: documentation_for_definition(&sema, def, &node), - hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), + hover: Some(hover_for_definition(&sema, file_id, def, &node, &hover_config)), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { FileRange { file_id: it.file_id, range: it.focus_or_full_range() } }), @@ -196,7 +196,7 @@ impl StaticIndex<'_> { enclosing_moniker: current_crate .zip(def.enclosing_definition(self.db)) .and_then(|(cc, enclosing_def)| def_to_moniker(self.db, enclosing_def, cc)), - signature: def.label(self.db), + signature: Some(def.label(self.db)), kind: def_to_kind(self.db, def), }); self.def_map.insert(def, it); From 47b21730c4bf6ff2a93929e58009646fd022e3c8 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Wed, 31 Jan 2024 03:24:24 +0100 Subject: [PATCH 49/92] Factor out unspecialization --- .../rustc_pattern_analysis/src/usefulness.rs | 36 +++++++++++-------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_pattern_analysis/src/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs index d35b0248e41..ffea76e4d53 100644 --- a/compiler/rustc_pattern_analysis/src/usefulness.rs +++ b/compiler/rustc_pattern_analysis/src/usefulness.rs @@ -1118,6 +1118,25 @@ impl<'p, Cx: TypeCx> Matrix<'p, Cx> { } Ok(matrix) } + + /// Recover row usefulness and intersection information from a processed specialized matrix. + /// `specialized` must come from `self.specialize_constructor`. + fn unspecialize(&mut self, specialized: Self) { + for child_row in specialized.rows() { + let parent_row_id = child_row.parent_row; + let parent_row = &mut self.rows[parent_row_id]; + // A parent row is useful if any of its children is. + parent_row.useful |= child_row.useful; + for child_intersection in child_row.intersects.iter() { + // Convert the intersecting ids into ids for the parent matrix. + let parent_intersection = specialized.rows[child_intersection].parent_row; + // Note: self-intersection can happen with or-patterns. + if parent_intersection != parent_row_id { + parent_row.intersects.insert(parent_intersection); + } + } + } + } } /// Pretty-printer for matrices of patterns, example: @@ -1542,21 +1561,6 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( // Accumulate the found witnesses. 
ret.extend(witnesses); - for child_row in spec_matrix.rows() { - let parent_row_id = child_row.parent_row; - let parent_row = &mut matrix.rows[parent_row_id]; - // A parent row is useful if any of its children is. - parent_row.useful |= child_row.useful; - for child_intersection in child_row.intersects.iter() { - // Convert the intersecting ids into ids for the parent matrix. - let parent_intersection = spec_matrix.rows[child_intersection].parent_row; - // Note: self-intersection can happen with or-patterns. - if parent_intersection != parent_row_id { - parent_row.intersects.insert(parent_intersection); - } - } - } - // Detect ranges that overlap on their endpoints. if let Constructor::IntRange(overlap_range) = ctor { if overlap_range.is_singleton() @@ -1566,6 +1570,8 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( collect_overlapping_range_endpoints(mcx, overlap_range, matrix, &spec_matrix); } } + + matrix.unspecialize(spec_matrix); } // Record usefulness in the patterns. From d93096ecc0cb530d851ebbd58dce6cd2e68c850f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Feb 2024 10:40:39 +0100 Subject: [PATCH 50/92] internal: Fetch toolchain and datalayout for DetachedFiles --- crates/hir-def/src/body/pretty.rs | 4 +- crates/hir-def/src/import_map.rs | 4 +- crates/hir-def/src/item_tree.rs | 6 +- crates/hir-def/src/item_tree/lower.rs | 26 ++++- crates/hir-def/src/item_tree/pretty.rs | 9 +- crates/hir-def/src/nameres.rs | 2 +- crates/hir-def/src/nameres/collector.rs | 2 +- crates/hir-def/src/nameres/tests/macros.rs | 3 - .../src/diagnostics/match_check/pat_util.rs | 2 +- crates/hir-ty/src/mir/eval/shim.rs | 8 +- crates/hir-ty/src/mir/eval/shim/simd.rs | 1 + crates/hir-ty/src/mir/lower.rs | 10 +- .../hir-ty/src/mir/lower/pattern_matching.rs | 13 ++- .../src/handlers/generate_delegate_methods.rs | 2 +- .../src/completions/item_list/trait_impl.rs | 2 +- crates/ide-db/src/imports/insert_use/tests.rs | 1 - crates/ide-db/src/symbol_index.rs | 3 +- crates/ide/src/join_lines.rs | 1 - crates/load-cargo/src/lib.rs | 15 ++- crates/project-model/src/workspace.rs | 96 ++++++++++++++----- crates/rust-analyzer/src/cargo_target_spec.rs | 3 +- crates/rust-analyzer/src/cli/lsif.rs | 2 +- crates/rust-analyzer/src/cli/rustc_tests.rs | 22 +++-- crates/rust-analyzer/src/cli/scip.rs | 2 +- crates/salsa/salsa-macros/src/query_group.rs | 1 - crates/salsa/src/debug.rs | 1 - crates/salsa/src/derived.rs | 1 - crates/salsa/src/input.rs | 1 - crates/salsa/src/interned.rs | 1 - 29 files changed, 164 insertions(+), 80 deletions(-) diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 7007dea638e..cd14f7b855a 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -6,8 +6,8 @@ use itertools::Itertools; use crate::{ hir::{ - Array, BindingAnnotation, BindingId, CaptureBy, ClosureKind, Literal, LiteralOrConst, - Movability, Statement, + Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability, + Statement, }, pretty::{print_generic_args, print_path, print_type_ref}, type_ref::TypeRef, diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index 98982c7db84..faa1eed15a4 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -3,7 +3,7 @@ use std::{fmt, hash::BuildHasherDefault}; use base_db::CrateId; -use fst::{self, raw::IndexedValue, Automaton, Streamer}; +use fst::{raw::IndexedValue, Automaton, Streamer}; use hir_expand::name::Name; use 
indexmap::IndexMap; use itertools::Itertools; @@ -477,7 +477,7 @@ mod tests { use expect_test::{expect, Expect}; use test_fixture::WithFixture; - use crate::{db::DefDatabase, test_db::TestDB, ItemContainerId, Lookup}; + use crate::{test_db::TestDB, ItemContainerId, Lookup}; use super::*; diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index be16a5e31a2..bb36950f95a 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -44,13 +44,13 @@ use std::{ ops::{Index, Range}, }; -use ast::{AstNode, HasName, StructKind}; +use ast::{AstNode, StructKind}; use base_db::CrateId; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, attrs::RawAttrs, - name::{name, AsName, Name}, + name::Name, ExpandTo, HirFileId, InFile, }; use intern::Interned; @@ -67,7 +67,7 @@ use crate::{ attr::Attrs, db::DefDatabase, generics::{GenericParams, LifetimeParamData, TypeOrConstParamData}, - path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind}, + path::{GenericArgs, ImportAlias, ModPath, Path, PathKind}, type_ref::{Mutability, TraitRef, TypeBound, TypeRef}, visibility::{RawVisibility, VisibilityExplicitness}, BlockId, Lookup, diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index e0aa3ae6123..37fdece8768 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -2,17 +2,33 @@ use std::collections::hash_map::Entry; -use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef, HirFileId}; -use syntax::ast::{self, HasModuleItem, HasTypeBounds, IsString}; +use hir_expand::{ + ast_id_map::AstIdMap, mod_path::path, name, name::AsName, span_map::SpanMapRef, HirFileId, +}; +use la_arena::Arena; +use syntax::{ + ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString}, + AstNode, +}; +use triomphe::Arc; use crate::{ + db::DefDatabase, generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance}, - type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, + item_tree::{ + AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldAstId, + Fields, FileItemTreeId, FnFlags, Function, GenericArgs, Idx, IdxRange, Impl, ImportAlias, + Interned, ItemTree, ItemTreeData, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, + ModItem, ModKind, ModPath, Mutability, Name, Param, ParamAstId, Path, Range, RawAttrs, + RawIdx, RawVisibilityId, Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, + Use, UseTree, UseTreeKind, Variant, + }, + path::AssociatedTypeBinding, + type_ref::{LifetimeRef, TraitBoundModifier, TraitRef, TypeBound, TypeRef}, + visibility::RawVisibility, LocalLifetimeParamId, LocalTypeOrConstParamId, }; -use super::*; - fn id(index: Idx) -> FileItemTreeId { FileItemTreeId(index) } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 0086b7180b2..87c90a4c6ab 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -6,12 +6,17 @@ use span::ErasedFileAstId; use crate::{ generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, + item_tree::{ + AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldAstId, Fields, + FileItemTreeId, FnFlags, Function, GenericParams, Impl, Interned, ItemTree, Macro2, + MacroCall, MacroRules, Mod, ModItem, ModKind, Param, ParamAstId, Path, RawAttrs, + RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, 
TypeRef, Union, + Use, UseTree, UseTreeKind, Variant, + }, pretty::{print_path, print_type_bounds, print_type_ref}, visibility::RawVisibility, }; -use super::*; - pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String { let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true }; diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 2a9390e7978..a2eca066438 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -57,7 +57,7 @@ pub mod proc_macro; #[cfg(test)] mod tests; -use std::{cmp::Ord, ops::Deref}; +use std::ops::Deref; use base_db::{CrateId, Edition, FileId}; use hir_expand::{ diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 88838f58fe7..32825406505 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -2446,7 +2446,7 @@ mod tests { use base_db::SourceDatabase; use test_fixture::WithFixture; - use crate::{db::DefDatabase, test_db::TestDB}; + use crate::test_db::TestDB; use super::*; diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs index bf89ea711a0..d278b75e815 100644 --- a/crates/hir-def/src/nameres/tests/macros.rs +++ b/crates/hir-def/src/nameres/tests/macros.rs @@ -1,10 +1,7 @@ use expect_test::expect; -use test_fixture::WithFixture; use itertools::Itertools; -use crate::nameres::tests::check; - use super::*; #[test] diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_util.rs b/crates/hir-ty/src/diagnostics/match_check/pat_util.rs index 217454499ef..c6a26cdd1d0 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_util.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_util.rs @@ -2,7 +2,7 @@ //! //! 
Originates from `rustc_hir::pat_util` -use std::iter::{Enumerate, ExactSizeIterator}; +use std::iter::Enumerate; pub(crate) struct EnumerateAndAdjust { enumerate: Enumerate, diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs index d68803fe280..cd992d07602 100644 --- a/crates/hir-ty/src/mir/eval/shim.rs +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -8,9 +8,13 @@ use hir_def::{ builtin_type::{BuiltinInt, BuiltinUint}, resolver::HasResolver, }; -use hir_expand::mod_path::ModPath; -use super::*; +use crate::mir::eval::{ + name, pad16, static_lifetime, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, + HasModule, HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy, + IntervalOrOwned, ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, + ModPath, Mutability, Result, Substitution, Ty, TyBuilder, TyExt, +}; mod simd; diff --git a/crates/hir-ty/src/mir/eval/shim/simd.rs b/crates/hir-ty/src/mir/eval/shim/simd.rs index eddfd0acfb9..e229a4ab317 100644 --- a/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -2,6 +2,7 @@ use std::cmp::Ordering; +use crate::consteval::try_const_usize; use crate::TyKind; use super::*; diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index b038900cdac..ed316f97268 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -31,14 +31,20 @@ use crate::{ inhabitedness::is_ty_uninhabited_from, layout::LayoutError, mapping::ToChalk, + mir::{ + intern_const_scalar, return_slot, AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, + BorrowKind, CastKind, ClosureId, ConstScalar, Either, Expr, FieldId, Idx, InferenceResult, + Interner, Local, LocalId, MemoryMap, MirBody, MirSpan, Mutability, Operand, Place, + PlaceElem, PointerCast, ProjectionElem, ProjectionStore, RawIdx, Rvalue, Statement, + StatementKind, Substitution, SwitchTargets, Terminator, TerminatorKind, TupleFieldId, Ty, + UnOp, VariantId, + }, static_lifetime, traits::FnTrait, utils::{generics, ClosureSubst}, Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt, }; -use super::*; - mod as_place; mod pattern_matching; diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs index 8202bac532f..85c8d1685b8 100644 --- a/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -2,9 +2,16 @@ use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId}; -use crate::BindingMode; - -use super::*; +use crate::{ + mir::lower::{ + BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, Interner, + MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Operand, Pat, PatId, Place, + PlaceElem, ProjectionElem, RecordFieldPat, ResolveValueResult, Result, Rvalue, + Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind, + ValueNs, VariantData, VariantId, + }, + BindingMode, +}; macro_rules! 
not_supported { ($x: expr) => { diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 4f2df5633c3..38f40b8d58b 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -1,4 +1,4 @@ -use hir::{self, HasCrate, HasVisibility}; +use hir::{HasCrate, HasVisibility}; use ide_db::{path_transform::PathTransform, FxHashSet}; use syntax::{ ast::{ diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 3c4b89ca742..7394d63be58 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -31,7 +31,7 @@ //! } //! ``` -use hir::{self, HasAttrs}; +use hir::HasAttrs; use ide_db::{ documentation::HasDocs, path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind, diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs index 6b0fecae267..10c285a13fb 100644 --- a/crates/ide-db/src/imports/insert_use/tests.rs +++ b/crates/ide-db/src/imports/insert_use/tests.rs @@ -1,4 +1,3 @@ -use hir::PrefixKind; use stdx::trim_indent; use test_fixture::WithFixture; use test_utils::{assert_eq_text, CURSOR_MARKER}; diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index 92c09089e1f..c65467a4324 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -31,7 +31,7 @@ use base_db::{ salsa::{self, ParallelDatabase}, SourceDatabaseExt, SourceRootId, Upcast, }; -use fst::{self, raw::IndexedValue, Automaton, Streamer}; +use fst::{raw::IndexedValue, Automaton, Streamer}; use hir::{ db::HirDatabase, import_map::{AssocSearchMode, SearchMode}, @@ -394,7 +394,6 @@ impl Query { mod tests { use expect_test::expect_file; - use hir::symbols::SymbolCollector; use test_fixture::WithFixture; use super::*; diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs index fef0ec35ba0..815a4ba7fd7 100644 --- a/crates/ide/src/join_lines.rs +++ b/crates/ide/src/join_lines.rs @@ -303,7 +303,6 @@ fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { #[cfg(test)] mod tests { - use syntax::SourceFile; use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; use super::*; diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 8c5592da63e..830d19a709c 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -309,6 +309,10 @@ fn load_crate_graph( vfs: &mut vfs::Vfs, receiver: &Receiver, ) -> AnalysisHost { + let (ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. } + | ProjectWorkspace::DetachedFiles { toolchain, target_layout, .. }) = ws; + let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::().ok()); let mut host = AnalysisHost::new(lru_cap); let mut analysis_change = Change::new(); @@ -344,14 +348,9 @@ fn load_crate_graph( let num_crates = crate_graph.len(); analysis_change.set_crate_graph(crate_graph); analysis_change.set_proc_macros(proc_macros); - if let ProjectWorkspace::Cargo { toolchain, target_layout, .. } - | ProjectWorkspace::Json { toolchain, target_layout, .. 
} = ws - { - analysis_change.set_target_data_layouts( - iter::repeat(target_layout.clone()).take(num_crates).collect(), - ); - analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); - } + analysis_change + .set_target_data_layouts(iter::repeat(target_layout.clone()).take(num_crates).collect()); + analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); host.apply_change(analysis_change); host diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index b7ae76be8ce..bcb5dcadb5b 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -100,6 +100,8 @@ pub enum ProjectWorkspace { /// Holds cfg flags for the current target. We get those by running /// `rustc --print cfg`. rustc_cfg: Vec, + toolchain: Option, + target_layout: TargetLayoutLoadResult, }, } @@ -145,16 +147,24 @@ impl fmt::Debug for ProjectWorkspace { debug_struct.field("n_sysroot_crates", &sysroot.num_packages()); } debug_struct - .field("toolchain", &toolchain) .field("n_rustc_cfg", &rustc_cfg.len()) + .field("toolchain", &toolchain) .field("data_layout", &data_layout); debug_struct.finish() } - ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f + ProjectWorkspace::DetachedFiles { + files, + sysroot, + rustc_cfg, + toolchain, + target_layout, + } => f .debug_struct("DetachedFiles") .field("n_files", &files.len()) .field("sysroot", &sysroot.is_ok()) .field("n_rustc_cfg", &rustc_cfg.len()) + .field("toolchain", &toolchain) + .field("data_layout", &target_layout) .finish(), } } @@ -403,32 +413,54 @@ impl ProjectWorkspace { detached_files: Vec, config: &CargoConfig, ) -> anyhow::Result { + let dir = detached_files + .first() + .and_then(|it| it.parent()) + .ok_or_else(|| format_err!("No detached files to load"))?; let sysroot = match &config.sysroot { Some(RustLibSource::Path(path)) => { Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata) .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}"))) } - Some(RustLibSource::Discover) => { - let dir = &detached_files - .first() - .and_then(|it| it.parent()) - .ok_or_else(|| format_err!("No detached files to load"))?; - Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata).map_err( - |e| { - Some(format!( - "Failed to find sysroot for {dir}. Is rust-src installed? {e}" - )) - }, - ) - } + Some(RustLibSource::Discover) => Sysroot::discover( + dir, + &config.extra_env, + config.sysroot_query_metadata, + ) + .map_err(|e| { + Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? 
{e}")) + }), None => Err(None), }; - let rustc_cfg = rustc_cfg::get( + + let sysroot_ref = sysroot.as_ref().ok(); + let toolchain = match get_toolchain_version( + dir, + sysroot_ref, + toolchain::Tool::Rustc, + &config.extra_env, + "rustc ", + ) { + Ok(it) => it, + Err(e) => { + tracing::error!("{e}"); + None + } + }; + + let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref)); + let data_layout = target_data_layout::get( + RustcDataLayoutConfig::Rustc(sysroot_ref), None, - &FxHashMap::default(), - RustcCfgConfig::Rustc(sysroot.as_ref().ok()), + &config.extra_env, ); - Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) + Ok(ProjectWorkspace::DetachedFiles { + files: detached_files, + sysroot, + rustc_cfg, + toolchain, + target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + }) } /// Runs the build scripts for this [`ProjectWorkspace`]. @@ -724,7 +756,13 @@ impl ProjectWorkspace { cfg_overrides, build_scripts, ), - ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { + ProjectWorkspace::DetachedFiles { + files, + sysroot, + rustc_cfg, + toolchain: _, + target_layout: _, + } => { detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok()) } }; @@ -786,9 +824,21 @@ impl ProjectWorkspace { && toolchain == o_toolchain } ( - Self::DetachedFiles { files, sysroot, rustc_cfg }, - Self::DetachedFiles { files: o_files, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg }, - ) => files == o_files && sysroot == o_sysroot && rustc_cfg == o_rustc_cfg, + Self::DetachedFiles { files, sysroot, rustc_cfg, toolchain, target_layout }, + Self::DetachedFiles { + files: o_files, + sysroot: o_sysroot, + rustc_cfg: o_rustc_cfg, + toolchain: o_toolchain, + target_layout: o_target_layout, + }, + ) => { + files == o_files + && sysroot == o_sysroot + && rustc_cfg == o_rustc_cfg + && toolchain == o_toolchain + && target_layout == o_target_layout + } _ => false, } } diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 9a9357a5398..815a98980b9 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -4,7 +4,7 @@ use std::mem; use cfg::{CfgAtom, CfgExpr}; use ide::{Cancellable, CrateId, FileId, RunnableKind, TestId}; -use project_model::{self, CargoFeatures, ManifestPath, TargetKind}; +use project_model::{CargoFeatures, ManifestPath, TargetKind}; use rustc_hash::FxHashSet; use vfs::AbsPathBuf; @@ -208,7 +208,6 @@ fn required_features(cfg_expr: &CfgExpr, features: &mut Vec) { mod tests { use super::*; - use cfg::CfgExpr; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use syntax::{ ast::{self, AstNode}, diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index 1424a775777..5e810463db6 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -13,7 +13,7 @@ use ide_db::{ LineIndexDatabase, }; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; -use lsp_types::{self, lsif}; +use lsp_types::lsif; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use rustc_hash::FxHashMap; use vfs::{AbsPathBuf, Vfs}; diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index 64ea246a458..25f84d770bf 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -5,7 
+5,8 @@ use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::Path use hir::{Change, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; use profile::StopWatch; -use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; +use project_model::target_data_layout::RustcDataLayoutConfig; +use project_model::{target_data_layout, CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use rustc_hash::FxHashMap; @@ -60,15 +61,22 @@ impl Tester { std::fs::write(&tmp_file, "")?; let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; + + let sysroot = + Ok(Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false) + .unwrap()); + let data_layout = target_data_layout::get( + RustcDataLayoutConfig::Rustc(sysroot.as_ref().ok()), + None, + &cargo_config.extra_env, + ); + let workspace = ProjectWorkspace::DetachedFiles { files: vec![tmp_file.clone()], - sysroot: Ok(Sysroot::discover( - tmp_file.parent().unwrap(), - &cargo_config.extra_env, - false, - ) - .unwrap()), + sysroot, rustc_cfg: vec![], + toolchain: None, + target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: false, diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 2d56830c87f..27869a5a7e6 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -324,7 +324,7 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { #[cfg(test)] mod test { use super::*; - use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize}; + use ide::{AnalysisHost, FilePosition, TextSize}; use scip::symbol::format_symbol; use test_fixture::ChangeFixture; diff --git a/crates/salsa/salsa-macros/src/query_group.rs b/crates/salsa/salsa-macros/src/query_group.rs index e535d7ed043..5d1678ef120 100644 --- a/crates/salsa/salsa-macros/src/query_group.rs +++ b/crates/salsa/salsa-macros/src/query_group.rs @@ -1,5 +1,4 @@ //! -use std::{convert::TryFrom, iter::FromIterator}; use crate::parenthesized::Parenthesized; use heck::ToUpperCamelCase; diff --git a/crates/salsa/src/debug.rs b/crates/salsa/src/debug.rs index 0925ddb3d85..5f113541f04 100644 --- a/crates/salsa/src/debug.rs +++ b/crates/salsa/src/debug.rs @@ -5,7 +5,6 @@ use crate::durability::Durability; use crate::plumbing::QueryStorageOps; use crate::Query; use crate::QueryTable; -use std::iter::FromIterator; /// Additional methods on queries that can be used to "peek into" /// their current state. 
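The salsa hunks above drop imports such as `use std::iter::FromIterator;` and `use std::convert::TryFrom;`. Assuming these crates build with the 2021 edition, those traits already come in through the prelude, so the explicit imports are redundant. A standalone illustration:

```rust
// Edition 2021 prelude already exports FromIterator, TryFrom and TryInto,
// which is why the explicit `use` lines can be removed without breakage.
fn main() {
    let squares = Vec::from_iter((1..=3).map(|n| n * n)); // no import needed
    assert_eq!(squares, vec![1, 4, 9]);

    let byte = u8::try_from(300i32); // TryFrom via the prelude
    assert!(byte.is_err());
}
```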
These methods are meant for debugging and diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs index c381e66e087..d6316710058 100644 --- a/crates/salsa/src/derived.rs +++ b/crates/salsa/src/derived.rs @@ -13,7 +13,6 @@ use crate::Runtime; use crate::{Database, DatabaseKeyIndex, QueryDb, Revision}; use parking_lot::RwLock; use std::borrow::Borrow; -use std::convert::TryFrom; use std::hash::Hash; use std::marker::PhantomData; use triomphe::Arc; diff --git a/crates/salsa/src/input.rs b/crates/salsa/src/input.rs index 4e8fca6149b..c2539570e0f 100644 --- a/crates/salsa/src/input.rs +++ b/crates/salsa/src/input.rs @@ -14,7 +14,6 @@ use crate::Runtime; use crate::{DatabaseKeyIndex, QueryDb}; use indexmap::map::Entry; use parking_lot::RwLock; -use std::convert::TryFrom; use std::iter; use tracing::debug; diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs index 731839e9598..822219f5185 100644 --- a/crates/salsa/src/interned.rs +++ b/crates/salsa/src/interned.rs @@ -13,7 +13,6 @@ use crate::{Database, DatabaseKeyIndex, QueryDb}; use parking_lot::RwLock; use rustc_hash::FxHashMap; use std::collections::hash_map::Entry; -use std::convert::From; use std::fmt::Debug; use std::hash::Hash; use triomphe::Arc; From 85203d97216e88f1bc2df9eb5e8d1d0bd9d93118 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 18:14:48 +0100 Subject: [PATCH 51/92] Render assoc item owner in hover for items other than functions --- crates/hir/src/lib.rs | 88 ++++++++++++++++++++++++++++++++++ crates/ide-db/src/defs.rs | 21 ++++++-- crates/ide/src/hover/render.rs | 22 ++++++--- crates/ide/src/hover/tests.rs | 32 ++++++------- 4 files changed, 135 insertions(+), 28 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index beaa6dd4d67..2d8811cf5eb 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2653,6 +2653,37 @@ impl ItemInNs { } } +/// Invariant: `inner.as_extern_assoc_item(db).is_some()` +/// We do not actively enforce this invariant. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum ExternAssocItem { + Function(Function), + Static(Static), + TypeAlias(TypeAlias), +} + +pub trait AsExternAssocItem { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option; +} + +impl AsExternAssocItem for Function { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_extern_assoc_item(db, ExternAssocItem::Function, self.id) + } +} + +impl AsExternAssocItem for Static { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_extern_assoc_item(db, ExternAssocItem::Static, self.id) + } +} + +impl AsExternAssocItem for TypeAlias { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_extern_assoc_item(db, ExternAssocItem::TypeAlias, self.id) + } +} + /// Invariant: `inner.as_assoc_item(db).is_some()` /// We do not actively enforce this invariant. 
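The new `AsExternAssocItem` impls above all funnel into a container check; the real generic helper (just below) returns `Some` only for `ItemContainerId::ExternBlockId`. A standalone sketch of that shape, using stand-in types with a plain `container` field instead of database ids:

```rust
// Stand-ins for the hir types; the real ones carry database ids, not a
// `container` field, but the container check is the same idea.
#[derive(Debug)]
enum Container { ExternBlock, Trait, Impl, Module }

#[derive(Debug)]
struct Function { container: Container }
#[derive(Debug)]
struct Static { container: Container }

#[derive(Debug)]
enum ExternAssocItem {
    Function(Function),
    Static(Static),
}

trait AsExternAssocItem {
    fn as_extern_assoc_item(self) -> Option<ExternAssocItem>;
}

impl AsExternAssocItem for Function {
    fn as_extern_assoc_item(self) -> Option<ExternAssocItem> {
        match self.container {
            // Only items owned by an `extern` block qualify.
            Container::ExternBlock => Some(ExternAssocItem::Function(self)),
            Container::Trait | Container::Impl | Container::Module => None,
        }
    }
}

impl AsExternAssocItem for Static {
    fn as_extern_assoc_item(self) -> Option<ExternAssocItem> {
        match self.container {
            Container::ExternBlock => Some(ExternAssocItem::Static(self)),
            Container::Trait | Container::Impl | Container::Module => None,
        }
    }
}

fn main() {
    let f = Function { container: Container::ExternBlock };
    assert!(f.as_extern_assoc_item().is_some());

    let s = Static { container: Container::Module };
    assert!(s.as_extern_assoc_item().is_none());
}
```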
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -2727,6 +2758,63 @@ where } } +fn as_extern_assoc_item<'db, ID, DEF, LOC>( + db: &(dyn HirDatabase + 'db), + ctor: impl FnOnce(DEF) -> ExternAssocItem, + id: ID, +) -> Option +where + ID: Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, + DEF: From, + LOC: ItemTreeNode, +{ + match id.lookup(db.upcast()).container { + ItemContainerId::ExternBlockId(_) => Some(ctor(DEF::from(id))), + ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) | ItemContainerId::ModuleId(_) => { + None + } + } +} + +impl ExternAssocItem { + pub fn name(self, db: &dyn HirDatabase) -> Name { + match self { + Self::Function(it) => it.name(db), + Self::Static(it) => it.name(db), + Self::TypeAlias(it) => it.name(db), + } + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + Self::Function(f) => f.module(db), + Self::Static(c) => c.module(db), + Self::TypeAlias(t) => t.module(db), + } + } + + pub fn as_function(self) -> Option { + match self { + Self::Function(v) => Some(v), + _ => None, + } + } + + pub fn as_static(self) -> Option { + match self { + Self::Static(v) => Some(v), + _ => None, + } + } + + pub fn as_type_alias(self) -> Option { + match self { + Self::TypeAlias(v) => Some(v), + _ => None, + } + } +} + impl AssocItem { pub fn name(self, db: &dyn HirDatabase) -> Option { match self { diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index 747c90561de..1b6ff8bad53 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -8,11 +8,11 @@ use arrayvec::ArrayVec; use either::Either; use hir::{ - Adt, AsAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, Const, Crate, - DefWithBody, DeriveHelper, DocLinkDef, ExternCrateDecl, Field, Function, GenericParam, - HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module, ModuleDef, Name, PathResolution, - Semantics, Static, ToolModule, Trait, TraitAlias, TupleField, TypeAlias, Variant, VariantDef, - Visibility, + Adt, AsAssocItem, AsExternAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, + Const, Crate, DefWithBody, DeriveHelper, DocLinkDef, ExternAssocItem, ExternCrateDecl, Field, + Function, GenericParam, HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module, + ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, TupleField, + TypeAlias, Variant, VariantDef, Visibility, }; use stdx::{format_to, impl_from}; use syntax::{ @@ -742,6 +742,17 @@ impl AsAssocItem for Definition { } } +impl AsExternAssocItem for Definition { + fn as_extern_assoc_item(self, db: &dyn hir::db::HirDatabase) -> Option { + match self { + Definition::Function(it) => it.as_extern_assoc_item(db), + Definition::Static(it) => it.as_extern_assoc_item(db), + Definition::TypeAlias(it) => it.as_extern_assoc_item(db), + _ => None, + } + } +} + impl From for Definition { fn from(assoc_item: AssocItem) -> Self { match assoc_item { diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 42342d94b6d..563e78253a8 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -3,8 +3,8 @@ use std::{mem, ops::Not}; use either::Either; use hir::{ - Adt, AsAssocItem, CaptureKind, HasCrate, HasSource, HirDisplay, Layout, LayoutError, Name, - Semantics, Trait, Type, TypeInfo, + Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource, HirDisplay, Layout, + LayoutError, Name, Semantics, Trait, Type, TypeInfo, }; use ide_db::{ base_db::SourceDatabase, @@ -369,12 +369,20 @@ 
fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option match def { Definition::Field(f) => Some(f.parent_def(db).name(db)), Definition::Local(l) => l.parent(db).name(db), - Definition::Function(f) => match f.as_assoc_item(db)?.container(db) { - hir::AssocItemContainer::Trait(t) => Some(t.name(db)), - hir::AssocItemContainer::Impl(i) => i.self_ty(db).as_adt().map(|adt| adt.name(db)), - }, Definition::Variant(e) => Some(e.parent_enum(db).name(db)), - _ => None, + + d => { + if let Some(assoc_item) = d.as_assoc_item(db) { + match assoc_item.container(db) { + hir::AssocItemContainer::Trait(t) => Some(t.name(db)), + hir::AssocItemContainer::Impl(i) => { + i.self_ty(db).as_adt().map(|adt| adt.name(db)) + } + } + } else { + return d.as_extern_assoc_item(db).map(|_| "".to_owned()); + } + } } .map(|name| name.display(db).to_string()) } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 157f8ff371e..ead4f91595f 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -1202,7 +1202,7 @@ fn main() { *C* ```rust - test + test::X ``` ```rust @@ -2277,7 +2277,7 @@ fn main() { let foo_test = unsafe { fo$0o(1, 2, 3); } } *foo* ```rust - test + test:: ``` ```rust @@ -4266,7 +4266,7 @@ fn main() { *B* ```rust - test + test::T ``` ```rust @@ -4295,7 +4295,7 @@ fn main() { *B* ```rust - test + test::T ``` ```rust @@ -4327,7 +4327,7 @@ fn main() { *B* ```rust - test + test::T ``` ```rust @@ -4919,7 +4919,7 @@ fn test() { *FOO* ```rust - test + test::S ``` ```rust @@ -5284,7 +5284,7 @@ impl T1 for Foo { *Bar* ```rust - test::t2 + test::t2::T2 ``` ```rust @@ -5306,7 +5306,7 @@ trait A { *Assoc* ```rust - test + test::A ``` ```rust @@ -5327,7 +5327,7 @@ trait A { *Assoc* ```rust - test + test::A ``` ```rust @@ -5346,7 +5346,7 @@ trait A where *Assoc* ```rust - test + test::A ``` ```rust @@ -6632,7 +6632,7 @@ fn test() { *A* ```rust - test + test::S ``` ```rust @@ -6661,7 +6661,7 @@ fn test() { *A* ```rust - test + test::S ``` ```rust @@ -6691,7 +6691,7 @@ mod m { *A* ```rust - test + test::S ``` ```rust @@ -7249,7 +7249,7 @@ extern "C" { *STATIC* ```rust - test + test:: ``` ```rust @@ -7267,7 +7267,7 @@ extern "C" { *fun* ```rust - test + test:: ``` ```rust @@ -7285,7 +7285,7 @@ extern "C" { *Ty* ```rust - test + test:: ``` ```rust From 06d6c62f80c2729f2ba3129c43c2cd3417b50251 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:28:27 +0000 Subject: [PATCH 52/92] Add newtype for raw idents --- compiler/rustc_ast/src/token.rs | 38 +++++++++++++----- compiler/rustc_ast/src/tokenstream.rs | 2 +- compiler/rustc_ast_pretty/src/pprust/state.rs | 8 ++-- compiler/rustc_builtin_macros/src/asm.rs | 3 +- .../src/assert/context.rs | 8 ++-- compiler/rustc_expand/src/mbe/macro_check.rs | 4 +- compiler/rustc_expand/src/mbe/macro_rules.rs | 14 +++++-- compiler/rustc_expand/src/mbe/metavar_expr.rs | 4 +- compiler/rustc_expand/src/mbe/quoted.rs | 4 +- compiler/rustc_expand/src/parse/tests.rs | 40 ++++++++++++++----- .../rustc_expand/src/proc_macro_server.rs | 13 +++--- .../rustc_expand/src/tokenstream/tests.rs | 5 ++- compiler/rustc_lint/src/builtin.rs | 2 +- compiler/rustc_parse/src/lexer/mod.rs | 8 ++-- .../rustc_parse/src/lexer/unicode_chars.rs | 2 +- .../rustc_parse/src/parser/diagnostics.rs | 13 +++--- compiler/rustc_parse/src/parser/expr.rs | 38 ++++++++++-------- compiler/rustc_parse/src/parser/item.rs | 7 ++-- compiler/rustc_parse/src/parser/mod.rs | 9 +++-- .../rustc_parse/src/parser/nonterminal.rs | 2 +- 
compiler/rustc_parse/src/parser/pat.rs | 2 +- compiler/rustc_parse/src/parser/path.rs | 3 +- compiler/rustc_parse/src/parser/stmt.rs | 2 +- src/librustdoc/clean/render_macro_matchers.rs | 4 +- 24 files changed, 148 insertions(+), 87 deletions(-) diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 50fe37dcdb6..5ccc7d51066 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -107,7 +107,7 @@ impl Lit { /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation. pub fn from_token(token: &Token) -> Option { match token.uninterpolate().kind { - Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), + Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), Literal(token_lit) => Some(token_lit), Interpolated(ref nt) if let NtExpr(expr) | NtLiteral(expr) = &nt.0 @@ -183,7 +183,7 @@ impl LitKind { } } -pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { +pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool { let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() @@ -214,7 +214,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } -fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { +fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool { let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() @@ -223,6 +223,24 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } +#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)] +pub enum IdentIsRaw { + No, + Yes, +} + +impl From for IdentIsRaw { + fn from(b: bool) -> Self { + if b { Self::Yes } else { Self::No } + } +} + +impl From for bool { + fn from(is_raw: IdentIsRaw) -> bool { + matches!(is_raw, IdentIsRaw::Yes) + } +} + // SAFETY: due to the `Clone` impl below, all fields of all variants other than // `Interpolated` must impl `Copy`. #[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] @@ -298,7 +316,7 @@ pub enum TokenKind { /// Do not forget about `NtIdent` when you want to match on identifiers. /// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to /// treat regular and interpolated identifiers in the same way. - Ident(Symbol, /* is_raw */ bool), + Ident(Symbol, IdentIsRaw), /// Lifetime identifier token. /// Do not forget about `NtLifetime` when you want to match on lifetime identifiers. /// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to @@ -411,7 +429,7 @@ impl Token { /// Recovers a `Token` from an `Ident`. This creates a raw identifier if necessary. pub fn from_ast_ident(ident: Ident) -> Self { - Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span) + Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span) } /// For interpolated tokens, returns a span of the fragment to which the interpolated @@ -567,7 +585,7 @@ impl Token { pub fn can_begin_literal_maybe_minus(&self) -> bool { match self.uninterpolate().kind { Literal(..) 
| BinOp(Minus) => true, - Ident(name, false) if name.is_bool_lit() => true, + Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true, Interpolated(ref nt) => match &nt.0 { NtLiteral(_) => true, NtExpr(e) => match &e.kind { @@ -602,7 +620,7 @@ impl Token { /// Returns an identifier if this token is an identifier. #[inline] - pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> { + pub fn ident(&self) -> Option<(Ident, IdentIsRaw)> { // We avoid using `Token::uninterpolate` here because it's slow. match &self.kind { &Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)), @@ -755,7 +773,7 @@ impl Token { /// Returns `true` if the token is a non-raw identifier for which `pred` holds. pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool { match self.ident() { - Some((id, false)) => pred(id), + Some((id, IdentIsRaw::No)) => pred(id), _ => false, } } @@ -806,7 +824,7 @@ impl Token { _ => return None, }, SingleQuote => match joint.kind { - Ident(name, false) => Lifetime(Symbol::intern(&format!("'{name}"))), + Ident(name, IdentIsRaw::No) => Lifetime(Symbol::intern(&format!("'{name}"))), _ => return None, }, @@ -836,7 +854,7 @@ pub enum Nonterminal { NtPat(P), NtExpr(P), NtTy(P), - NtIdent(Ident, /* is_raw */ bool), + NtIdent(Ident, IdentIsRaw), NtLifetime(Ident), NtLiteral(P), /// Stuff inside brackets for attributes diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 298c01a4567..adc3056cc29 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -656,7 +656,7 @@ impl TokenStream { DelimSpacing::new(Spacing::JointHidden, Spacing::Alone), Delimiter::Bracket, [ - TokenTree::token_alone(token::Ident(sym::doc, false), span), + TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span), TokenTree::token_alone(token::Eq, span), TokenTree::token_alone( TokenKind::lit(token::StrRaw(num_of_hashes), data, None), diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 7ea0078ea3b..6e1974f48b2 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -185,7 +185,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool { // IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if` (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. 
}, _)) - if !Ident::new(*sym, *span).is_reserved() || *is_raw => + if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) => { false } @@ -197,7 +197,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool { || *sym == kw::Fn || *sym == kw::SelfUpper || *sym == kw::Pub - || *is_raw => + || matches!(is_raw, IdentIsRaw::Yes) => { false } @@ -731,7 +731,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere token::NtBlock(e) => self.block_to_string(e), token::NtStmt(e) => self.stmt_to_string(e), token::NtPat(e) => self.pat_to_string(e), - token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(), + &token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw.into()).to_string(), token::NtLifetime(e) => e.to_string(), token::NtLiteral(e) => self.expr_to_string(e), token::NtVis(e) => self.vis_to_string(e), @@ -795,7 +795,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere /* Name components */ token::Ident(s, is_raw) => { - IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string().into() + IdentPrinter::new(s, is_raw.into(), convert_dollar_crate).to_string().into() } token::Lifetime(s) => s.to_string().into(), diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs index 0b2e63b403b..8489217ad93 100644 --- a/compiler/rustc_builtin_macros/src/asm.rs +++ b/compiler/rustc_builtin_macros/src/asm.rs @@ -1,3 +1,4 @@ +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter}; @@ -416,7 +417,7 @@ fn parse_reg<'a>( ) -> PResult<'a, ast::InlineAsmRegOrRegClass> { p.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let result = match p.token.uninterpolate().kind { - token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name), + token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name), token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => { *explicit_reg = true; ast::InlineAsmRegOrRegClass::Reg(symbol) diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs index d244897f8a5..01821ee833f 100644 --- a/compiler/rustc_builtin_macros/src/assert/context.rs +++ b/compiler/rustc_builtin_macros/src/assert/context.rs @@ -1,7 +1,6 @@ use rustc_ast::{ ptr::P, - token, - token::Delimiter, + token::{self, Delimiter, IdentIsRaw}, tokenstream::{DelimSpan, TokenStream, TokenTree}, BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MethodCall, Mutability, Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID, @@ -170,7 +169,10 @@ impl<'cx, 'a> Context<'cx, 'a> { ]; let captures = self.capture_decls.iter().flat_map(|cap| { [ - TokenTree::token_joint_hidden(token::Ident(cap.ident.name, false), cap.ident.span), + TokenTree::token_joint_hidden( + token::Ident(cap.ident.name, IdentIsRaw::No), + cap.ident.span, + ), TokenTree::token_alone(token::Comma, self.span), ] }); diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs index e66cfbe6fb6..ffc8f782fd3 100644 --- a/compiler/rustc_expand/src/mbe/macro_check.rs +++ b/compiler/rustc_expand/src/mbe/macro_check.rs @@ -107,7 +107,7 @@ use crate::errors; use crate::mbe::{KleeneToken, TokenTree}; -use rustc_ast::token::{Delimiter, Token, TokenKind}; +use rustc_ast::token::{Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::{NodeId, DUMMY_NODE_ID}; use 
rustc_data_structures::fx::FxHashMap; use rustc_errors::{DiagnosticMessage, MultiSpan}; @@ -409,7 +409,7 @@ fn check_nested_occurrences( match (state, tt) { ( NestedMacroState::Empty, - &TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }), + &TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. }), ) => { if name == kw::MacroRules { state = NestedMacroState::MacroRules; diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index c82609503c1..bf99e9e6d5c 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -8,6 +8,7 @@ use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser} use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc}; use crate::mbe::transcribe::transcribe; +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; @@ -1302,7 +1303,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, - Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, + Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => { + IsInFollow::Yes + } _ => IsInFollow::No(TOKENS), }, _ => IsInFollow::No(TOKENS), @@ -1313,7 +1316,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq => IsInFollow::Yes, - Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, + Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => { + IsInFollow::Yes + } _ => IsInFollow::No(TOKENS), }, _ => IsInFollow::No(TOKENS), @@ -1336,7 +1341,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { | BinOp(token::Shr) | Semi | BinOp(token::Or) => IsInFollow::Yes, - Ident(name, false) if name == kw::As || name == kw::Where => { + Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => { IsInFollow::Yes } _ => IsInFollow::No(TOKENS), @@ -1364,7 +1369,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { Comma => IsInFollow::Yes, - Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes, + Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes, + Ident(name, _) if name != kw::Priv => IsInFollow::Yes, _ => { if token.can_begin_type() { IsInFollow::Yes diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs index 3ca0787ce8e..84f7dc4771a 100644 --- a/compiler/rustc_expand/src/mbe/metavar_expr.rs +++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs @@ -1,4 +1,4 @@ -use rustc_ast::token::{self, Delimiter}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw}; use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree}; use rustc_ast::{LitIntType, LitKind}; use rustc_ast_pretty::pprust; @@ -142,7 +142,7 @@ fn parse_ident<'sess>( if let Some(tt) = iter.next() && let TokenTree::Token(token, _) = tt { - if let Some((elem, false)) = token.ident() { + if let Some((elem, IdentIsRaw::No)) = token.ident() { return Ok(elem); } let token_str = pprust::token_to_string(token); diff --git a/compiler/rustc_expand/src/mbe/quoted.rs 
b/compiler/rustc_expand/src/mbe/quoted.rs index 0fdfa563138..ec1dd807d1a 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -2,7 +2,7 @@ use crate::errors; use crate::mbe::macro_parser::count_metavar_decls; use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree}; -use rustc_ast::token::{self, Delimiter, Token}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw, Token}; use rustc_ast::{tokenstream, NodeId}; use rustc_ast_pretty::pprust; use rustc_feature::Features; @@ -222,7 +222,7 @@ fn parse_tree<'a>( Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => { let (ident, is_raw) = token.ident().unwrap(); let span = ident.span.with_lo(span.lo()); - if ident.name == kw::Crate && !is_raw { + if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) { TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span) } else { TokenTree::MetaVar(span, ident) diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs index 7a888250ca1..0e07b41b43c 100644 --- a/compiler/rustc_expand/src/parse/tests.rs +++ b/compiler/rustc_expand/src/parse/tests.rs @@ -2,6 +2,7 @@ use crate::tests::{ matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error, }; +use ast::token::IdentIsRaw; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token}; use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree}; @@ -74,9 +75,12 @@ fn string_to_tts_macro() { match tts { [ - TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. }, + _, + ), TokenTree::Token(Token { kind: token::Not, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _), + TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _), TokenTree::Delimited(.., macro_delim, macro_tts), ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => { let tts = ¯o_tts.trees().collect::>(); @@ -90,7 +94,10 @@ fn string_to_tts_macro() { match &tts[..] { [ TokenTree::Token(Token { kind: token::Dollar, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name, IdentIsRaw::No), .. }, + _, + ), ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => { } _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), @@ -99,7 +106,10 @@ fn string_to_tts_macro() { match &tts[..] { [ TokenTree::Token(Token { kind: token::Dollar, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name, IdentIsRaw::No), .. 
}, + _, + ), ] if second_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {} _ => panic!("value 4: {:?} {:?}", second_delim, second_tts), @@ -119,8 +129,11 @@ fn string_to_tts_1() { let tts = string_to_stream("fn a(b: i32) { b; }".to_string()); let expected = TokenStream::new(vec![ - TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)), - TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)), + TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)), + TokenTree::token_joint_hidden( + token::Ident(Symbol::intern("a"), IdentIsRaw::No), + sp(3, 4), + ), TokenTree::Delimited( DelimSpan::from_pair(sp(4, 5), sp(11, 12)), // `JointHidden` because the `(` is followed immediately by @@ -128,10 +141,16 @@ fn string_to_tts_1() { DelimSpacing::new(Spacing::JointHidden, Spacing::Alone), Delimiter::Parenthesis, TokenStream::new(vec![ - TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)), + TokenTree::token_joint( + token::Ident(Symbol::intern("b"), IdentIsRaw::No), + sp(5, 6), + ), TokenTree::token_alone(token::Colon, sp(6, 7)), // `JointHidden` because the `i32` is immediately followed by the `)`. - TokenTree::token_joint_hidden(token::Ident(sym::i32, false), sp(8, 11)), + TokenTree::token_joint_hidden( + token::Ident(sym::i32, IdentIsRaw::No), + sp(8, 11), + ), ]) .into(), ), @@ -143,7 +162,10 @@ fn string_to_tts_1() { DelimSpacing::new(Spacing::Alone, Spacing::Alone), Delimiter::Brace, TokenStream::new(vec![ - TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)), + TokenTree::token_joint( + token::Ident(Symbol::intern("b"), IdentIsRaw::No), + sp(15, 16), + ), // `Alone` because the `;` is followed by whitespace. TokenTree::token_alone(token::Semi, sp(16, 17)), ]) diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 8f31b5801da..b5595c01b87 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -1,4 +1,5 @@ use crate::base::ExtCtxt; +use ast::token::IdentIsRaw; use pm::bridge::{ server, DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree, }; @@ -216,7 +217,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec op("?"), SingleQuote => op("'"), - Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { sym, is_raw, span })), + Ident(sym, is_raw) => { + trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span })) + } Lifetime(name) => { let ident = symbol::Ident::new(name, span).without_first_quote(); trees.extend([ @@ -238,7 +241,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec)> for Vec { trees.push(TokenTree::Ident(Ident { sym: ident.name, - is_raw: *is_raw, + is_raw: matches!(is_raw, IdentIsRaw::Yes), span: ident.span, })) } @@ -352,7 +355,7 @@ impl ToInternal> } TokenTree::Ident(self::Ident { sym, is_raw, span }) => { rustc.sess().symbol_gallery.insert(sym, span); - smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw), span)] + smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)] } TokenTree::Literal(self::Literal { kind: self::LitKind::Integer, @@ -569,7 +572,7 @@ impl server::TokenStream for Rustc<'_, '_> { match &expr.kind { ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => { Ok(tokenstream::TokenStream::token_alone( - token::Ident(token_lit.symbol, false), + token::Ident(token_lit.symbol, IdentIsRaw::No), expr.span, )) } diff --git 
a/compiler/rustc_expand/src/tokenstream/tests.rs b/compiler/rustc_expand/src/tokenstream/tests.rs index 91c4dd732e3..78795e86fd5 100644 --- a/compiler/rustc_expand/src/tokenstream/tests.rs +++ b/compiler/rustc_expand/src/tokenstream/tests.rs @@ -1,6 +1,6 @@ use crate::tests::string_to_stream; -use rustc_ast::token; +use rustc_ast::token::{self, IdentIsRaw}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_span::create_default_session_globals_then; use rustc_span::{BytePos, Span, Symbol}; @@ -86,7 +86,8 @@ fn test_diseq_1() { fn test_is_empty() { create_default_session_globals_then(|| { let test0 = TokenStream::default(); - let test1 = TokenStream::token_alone(token::Ident(Symbol::intern("a"), false), sp(0, 1)); + let test1 = + TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1)); let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index f9149f54e92..30b9e64cb09 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -1821,7 +1821,7 @@ impl KeywordIdents { match tt { // Only report non-raw idents. TokenTree::Token(token, _) => { - if let Some((ident, false)) = token.ident() { + if let Some((ident, token::IdentIsRaw::No)) = token.ident() { self.check_ident_token(cx, UnderMacro(true), ident); } } diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index c768ea93b5f..dc9f5bad765 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -4,7 +4,7 @@ use crate::errors; use crate::lexer::unicode_chars::UNICODE_ARRAY; use crate::make_unclosed_delims_error; use rustc_ast::ast::{self, AttrStyle}; -use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind}; +use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::util::unicode::contains_text_flow_control_chars; use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey}; @@ -181,7 +181,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym }); } self.sess.raw_identifier_spans.push(span); - token::Ident(sym, true) + token::Ident(sym, IdentIsRaw::Yes) } rustc_lexer::TokenKind::UnknownPrefix => { self.report_unknown_prefix(start); @@ -201,7 +201,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { let span = self.mk_sp(start, self.pos); self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default() .push(span); - token::Ident(sym, false) + token::Ident(sym, IdentIsRaw::No) } // split up (raw) c string literals to an ident and a string literal when edition < 2021. 
rustc_lexer::TokenKind::Literal { @@ -339,7 +339,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { let sym = nfc_normalize(self.str_from(start)); let span = self.mk_sp(start, self.pos); self.sess.symbol_gallery.insert(sym, span); - token::Ident(sym, false) + token::Ident(sym, IdentIsRaw::No) } /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index a136abaa28b..3b4e05332fa 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[ // fancier error recovery to it, as there will be less overall work to do this way. const ASCII_ARRAY: &[(&str, &str, Option)] = &[ (" ", "Space", None), - ("_", "Underscore", Some(token::Ident(kw::Underscore, false))), + ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))), ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))), (",", "Comma", Some(token::Comma)), (";", "Semicolon", Some(token::Semi)), diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 659716548d9..7f5d604050d 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -21,6 +21,7 @@ use crate::errors::{ use crate::fluent_generated as fluent; use crate::parser; use crate::parser::attr::InnerAttrPolicy; +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; @@ -264,7 +265,7 @@ impl<'a> Parser<'a> { pub(super) fn expected_ident_found( &mut self, recover: bool, - ) -> PResult<'a, (Ident, /* is_raw */ bool)> { + ) -> PResult<'a, (Ident, IdentIsRaw)> { if let TokenKind::DocComment(..) = self.prev_token.kind { return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything { span: self.prev_token.span, @@ -290,11 +291,11 @@ impl<'a> Parser<'a> { let bad_token = self.token.clone(); // suggest prepending a keyword in identifier position with `r#` - let suggest_raw = if let Some((ident, false)) = self.token.ident() + let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.is_raw_guess() && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) { - recovered_ident = Some((ident, true)); + recovered_ident = Some((ident, IdentIsRaw::Yes)); // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`, // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#` @@ -320,7 +321,7 @@ impl<'a> Parser<'a> { let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| { let (invalid, valid) = self.token.span.split_at(len as u32); - recovered_ident = Some((Ident::new(valid_portion, valid), false)); + recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No)); HelpIdentifierStartsWithNumber { num_span: invalid } }); @@ -653,9 +654,9 @@ impl<'a> Parser<'a> { // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying // that in the parser requires unbounded lookahead, so we only add a hint to the existing // error rather than replacing it entirely. - if ((self.prev_token.kind == TokenKind::Ident(sym::c, false) + if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No) && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. 
}))) - || (self.prev_token.kind == TokenKind::Ident(sym::cr, false) + || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No) && matches!( &self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 20b9581f2ef..455d9c3deb3 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -10,6 +10,7 @@ use super::{ use crate::errors; use crate::maybe_recover_from_interpolated_ty_qpath; use ast::mut_visit::{noop_visit_expr, MutVisitor}; +use ast::token::IdentIsRaw; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment}; use core::mem; use rustc_ast::ptr::P; @@ -128,7 +129,7 @@ impl<'a> Parser<'a> { match self.parse_expr_res(restrictions, None) { Ok(expr) => Ok(expr), Err(err) => match self.token.ident() { - Some((Ident { name: kw::Underscore, .. }, false)) + Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) => { // Special-case handling of `foo(_, _, _)` @@ -459,7 +460,9 @@ impl<'a> Parser<'a> { return None; } (Some(op), _) => (op, self.token.span), - (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => { + (None, Some((Ident { name: sym::and, span }, IdentIsRaw::No))) + if self.may_recover() => + { self.dcx().emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "and".into(), @@ -467,7 +470,7 @@ impl<'a> Parser<'a> { }); (AssocOp::LAnd, span) } - (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => { + (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => { self.dcx().emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "or".into(), @@ -744,7 +747,7 @@ impl<'a> Parser<'a> { ( // `foo: ` ExprKind::Path(None, ast::Path { segments, .. }), - token::Ident(kw::For | kw::Loop | kw::While, false), + token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No), ) if segments.len() == 1 => { let snapshot = self.create_snapshot_for_diagnostic(); let label = Label { @@ -957,19 +960,20 @@ impl<'a> Parser<'a> { fn parse_expr_dot_or_call_with_(&mut self, mut e: P, lo: Span) -> PResult<'a, P> { loop { - let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { - // we are using noexpect here because we don't expect a `?` directly after a `return` - // which could be suggested otherwise - self.eat_noexpect(&token::Question) - } else { - self.eat(&token::Question) - }; + let has_question = + if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { + // we are using noexpect here because we don't expect a `?` directly after a `return` + // which could be suggested otherwise + self.eat_noexpect(&token::Question) + } else { + self.eat(&token::Question) + }; if has_question { // `expr?` e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e)); continue; } - let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { + let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { // we are using noexpect here because we don't expect a `.` directly after a `return` // which could be suggested otherwise self.eat_noexpect(&token::Dot) @@ -1128,19 +1132,19 @@ impl<'a> Parser<'a> { // 1. 
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => { assert!(suffix.is_none()); - self.token = Token::new(token::Ident(sym, false), ident_span); + self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span); let next_token = (Token::new(token::Dot, dot_span), self.token_spacing); self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token)) } // 1.2 | 1.2e3 DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => { - self.token = Token::new(token::Ident(symbol1, false), ident1_span); + self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span); // This needs to be `Spacing::Alone` to prevent regressions. // See issue #76399 and PR #76285 for more details let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone); let base1 = self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1)); - let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span); + let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span); self.bump_with((next_token2, self.token_spacing)); // `.` self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None) } @@ -1948,7 +1952,7 @@ impl<'a> Parser<'a> { self.bump(); // `builtin` self.bump(); // `#` - let Some((ident, false)) = self.token.ident() else { + let Some((ident, IdentIsRaw::No)) = self.token.ident() else { let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span }); return Err(err); }; @@ -3576,7 +3580,7 @@ impl<'a> Parser<'a> { fn find_struct_error_after_field_looking_code(&self) -> Option { match self.token.ident() { Some((ident, is_raw)) - if (is_raw || !ident.is_reserved()) + if (matches!(is_raw, IdentIsRaw::Yes) || !ident.is_reserved()) && self.look_ahead(1, |t| *t == token::Colon) => { Some(ast::ExprField { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index e7b9076bd3c..c6e80f3f07f 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -3,6 +3,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken}; use crate::errors::{self, MacroExpandsToAdtField}; use crate::fluent_generated as fluent; +use ast::token::IdentIsRaw; use rustc_ast::ast::*; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, TokenKind}; @@ -1079,7 +1080,7 @@ impl<'a> Parser<'a> { fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> { match self.token.ident() { - Some((ident @ Ident { name: kw::Underscore, .. }, false)) => { + Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => { self.bump(); Ok(ident) } @@ -1965,7 +1966,7 @@ impl<'a> Parser<'a> { let (ident, is_raw) = self.ident_or_err(true)?; if ident.name == kw::Underscore { self.sess.gated_spans.gate(sym::unnamed_fields, lo); - } else if !is_raw && ident.is_reserved() { + } else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() { let snapshot = self.create_snapshot_for_diagnostic(); let err = if self.check_fn_front_matter(false, Case::Sensitive) { let inherited_vis = Visibility { @@ -2743,7 +2744,7 @@ impl<'a> Parser<'a> { fn parse_self_param(&mut self) -> PResult<'a, Option> { // Extract an identifier *after* having confirmed that the token is one. 
let expect_self_ident = |this: &mut Self| match this.token.ident() { - Some((ident, false)) => { + Some((ident, IdentIsRaw::No)) => { this.bump(); ident } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index dea2b9e6ca7..80f6a20b985 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -11,6 +11,7 @@ mod stmt; mod ty; use crate::lexer::UnmatchedDelim; +use ast::token::IdentIsRaw; pub use attr_wrapper::AttrWrapper; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use expr::ForbiddenLetReason; @@ -499,7 +500,7 @@ impl<'a> Parser<'a> { fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> { let (ident, is_raw) = self.ident_or_err(recover)?; - if !is_raw && ident.is_reserved() { + if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() { let err = self.expected_ident_found_err(); if recover { err.emit(); @@ -511,7 +512,7 @@ impl<'a> Parser<'a> { Ok(ident) } - fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> { + fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> { match self.token.ident() { Some(ident) => Ok(ident), None => self.expected_ident_found(recover), @@ -568,7 +569,7 @@ impl<'a> Parser<'a> { } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() + && let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { true @@ -598,7 +599,7 @@ impl<'a> Parser<'a> { } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() + && let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() }); diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 071d6b72f3b..f1572a18a8b 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -201,6 +201,6 @@ impl<'a> Parser<'a> { /// The token is an identifier, but not `_`. /// We prohibit passing `_` to macros expecting `ident` for now. -fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> { +fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> { token.ident().filter(|(ident, _)| ident.name != kw::Underscore) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 75fc013d3e6..072db24265e 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -311,7 +311,7 @@ impl<'a> Parser<'a> { matches!( &token.uninterpolate().kind, token::FatArrow // e.g. `a | => 0,`. - | token::Ident(kw::If, false) // e.g. `a | if expr`. + | token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`. | token::Eq // e.g. `let a | = 0`. | token::Semi // e.g. `let a |;`. | token::Colon // e.g. `let a | :`. 
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 681039999a6..6e7bbe7e06d 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{Parser, Restrictions, TokenType}; use crate::errors::PathSingleColon; use crate::{errors, maybe_whole}; +use ast::token::IdentIsRaw; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::{ @@ -390,7 +391,7 @@ impl<'a> Parser<'a> { pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> { match self.token.ident() { - Some((ident, false)) if ident.is_path_segment_keyword() => { + Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => { self.bump(); Ok(ident) } diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 1bae5b32240..5c2f0967b64 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -691,7 +691,7 @@ impl<'a> Parser<'a> { token.kind, token::Ident( kw::For | kw::Loop | kw::While, - false + token::IdentIsRaw::No ) | token::OpenDelim(Delimiter::Brace) ) }) diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs index b736f4a7956..b6880cfc60f 100644 --- a/src/librustdoc/clean/render_macro_matchers.rs +++ b/src/librustdoc/clean/render_macro_matchers.rs @@ -1,4 +1,4 @@ -use rustc_ast::token::{self, BinOpToken, Delimiter}; +use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast_pretty::pprust::state::State as Printer; use rustc_ast_pretty::pprust::PrintState; @@ -148,7 +148,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) { (false, Other) } (Pound, token::Not) => (false, PoundBang), - (_, token::Ident(symbol, /* is_raw */ false)) + (_, token::Ident(symbol, IdentIsRaw::No)) if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) => { (true, Ident) From f5d0d087ad310856f9ed32fdef01acc009a91ff7 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:31:06 +0000 Subject: [PATCH 53/92] Add newtype for `IsTuple` --- .../rustc_builtin_macros/src/deriving/decodable.rs | 2 +- .../rustc_builtin_macros/src/deriving/default.rs | 4 ++-- .../src/deriving/generic/mod.rs | 13 +++++++++++-- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/decodable.rs b/compiler/rustc_builtin_macros/src/deriving/decodable.rs index 97d6b82de98..bf4693cd541 100644 --- a/compiler/rustc_builtin_macros/src/deriving/decodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/decodable.rs @@ -198,7 +198,7 @@ where match fields { Unnamed(fields, is_tuple) => { let path_expr = cx.expr_path(outer_pat_path); - if !*is_tuple { + if matches!(is_tuple, IsTuple::No) { path_expr } else { let fields = fields diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs b/compiler/rustc_builtin_macros/src/deriving/default.rs index d5a42566e19..0bd2d423a29 100644 --- a/compiler/rustc_builtin_macros/src/deriving/default.rs +++ b/compiler/rustc_builtin_macros/src/deriving/default.rs @@ -62,8 +62,8 @@ fn default_struct_substructure( let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ThinVec::new()); let expr = match summary { - Unnamed(_, false) => cx.expr_ident(trait_span, substr.type_ident), - Unnamed(fields, true) => 
{ + Unnamed(_, IsTuple::No) => cx.expr_ident(trait_span, substr.type_ident), + Unnamed(fields, IsTuple::Yes) => { let exprs = fields.iter().map(|sp| default_call(*sp)).collect(); cx.expr_call_ident(trait_span, substr.type_ident, exprs) } diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 6eeb028728c..3ee4fded749 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -286,10 +286,16 @@ pub struct FieldInfo { pub other_selflike_exprs: Vec>, } +#[derive(Copy, Clone)] +pub enum IsTuple { + No, + Yes, +} + /// Fields for a static method pub enum StaticFields { /// Tuple and unit structs/enum variants like this. - Unnamed(Vec, bool /*is tuple*/), + Unnamed(Vec, IsTuple), /// Normal structs/struct variants. Named(Vec<(Ident, Span)>), } @@ -1439,7 +1445,10 @@ impl<'a> TraitDef<'a> { } } - let is_tuple = matches!(struct_def, ast::VariantData::Tuple(..)); + let is_tuple = match struct_def { + ast::VariantData::Tuple(..) => IsTuple::Yes, + _ => IsTuple::No, + }; match (just_spans.is_empty(), named_idents.is_empty()) { (false, false) => cx .dcx() From 4850ae84422569747901c14169b5ed6dfbfb96a3 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:44:33 +0000 Subject: [PATCH 54/92] Add newtype for parser recovery --- compiler/rustc_builtin_macros/src/format.rs | 6 +-- .../rustc_parse/src/parser/diagnostics.rs | 18 ++++----- compiler/rustc_parse/src/parser/expr.rs | 8 ++-- compiler/rustc_parse/src/parser/item.rs | 26 +++++++------ compiler/rustc_parse/src/parser/mod.rs | 37 +++++++++++++------ compiler/rustc_parse/src/parser/stmt.rs | 6 +-- 6 files changed, 57 insertions(+), 44 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index b66f7111ff0..3366378d38d 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -10,6 +10,7 @@ use rustc_ast::{ use rustc_data_structures::fx::FxHashSet; use rustc_errors::{Applicability, DiagnosticBuilder, MultiSpan, PResult, SingleLabelManySpans}; use rustc_expand::base::{self, *}; +use rustc_parse::parser::Recovered; use rustc_parse_format as parse; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{BytePos, InnerSpan, Span}; @@ -111,9 +112,8 @@ fn parse_args<'a>(ecx: &mut ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult< _ => return Err(err), } } - Ok(recovered) => { - assert!(recovered); - } + Ok(Recovered::Yes) => (), + Ok(Recovered::No) => unreachable!(), } } first = false; diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 7f5d604050d..f4e7bb413dd 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -22,6 +22,7 @@ use crate::fluent_generated as fluent; use crate::parser; use crate::parser::attr::InnerAttrPolicy; use ast::token::IdentIsRaw; +use parser::Recovered; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; @@ -430,7 +431,7 @@ impl<'a> Parser<'a> { &mut self, edible: &[TokenKind], inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { + ) -> PResult<'a, Recovered> { debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible); fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); @@ -533,7 +534,7 @@ 
impl<'a> Parser<'a> { sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span), }); self.bump(); - return Ok(true); + return Ok(Recovered::Yes); } else if self.look_ahead(0, |t| { t == &token::CloseDelim(Delimiter::Brace) || ((t.can_begin_expr() || t.can_begin_item()) @@ -557,7 +558,7 @@ impl<'a> Parser<'a> { unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); - return Ok(true); + return Ok(Recovered::Yes); } } @@ -712,7 +713,7 @@ impl<'a> Parser<'a> { if self.check_too_many_raw_str_terminators(&mut err) { if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) { err.emit(); - return Ok(true); + return Ok(Recovered::Yes); } else { return Err(err); } @@ -1224,7 +1225,7 @@ impl<'a> Parser<'a> { |p| p.parse_generic_arg(None), ); match x { - Ok((_, _, false)) => { + Ok((_, _, Recovered::No)) => { if self.eat(&token::Gt) { // We made sense of it. Improve the error message. e.span_suggestion_verbose( @@ -1248,7 +1249,7 @@ impl<'a> Parser<'a> { } } } - Ok((_, _, true)) => {} + Ok((_, _, Recovered::Yes)) => {} Err(err) => { err.cancel(); } @@ -1841,10 +1842,7 @@ impl<'a> Parser<'a> { /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a /// closing delimiter. - pub(super) fn unexpected_try_recover( - &mut self, - t: &TokenKind, - ) -> PResult<'a, bool /* recovered */> { + pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> { let token_str = pprust::token_kind_to_string(t); let this_token_str = super::token_descr(&self.token); let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 455d9c3deb3..081f30ef11a 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -3,7 +3,7 @@ use super::diagnostics::SnapshotParser; use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ - AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, + AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions, SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken, }; @@ -3093,10 +3093,10 @@ impl<'a> Parser<'a> { if !require_comma { arm_body = Some(expr); this.eat(&token::Comma); - Ok(false) + Ok(Recovered::No) } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) { arm_body = Some(body); - Ok(true) + Ok(Recovered::Yes) } else { let expr_span = expr.span; arm_body = Some(expr); @@ -3177,7 +3177,7 @@ impl<'a> Parser<'a> { this.dcx().emit_err(errors::MissingCommaAfterMatchArm { span: arm_span.shrink_to_hi(), }); - return Ok(true); + return Ok(Recovered::Yes); } Err(err) }); diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index c6e80f3f07f..6e9af1e15d8 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1,6 +1,8 @@ use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; -use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken}; +use super::{ + AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, TrailingToken, +}; use crate::errors::{self, MacroExpandsToAdtField}; use crate::fluent_generated as fluent; use ast::token::IdentIsRaw; @@ 
-1534,10 +1536,10 @@ impl<'a> Parser<'a> { err.span_label(span, "while parsing this enum"); err.help(help); err.emit(); - (thin_vec![], true) + (thin_vec![], Recovered::Yes) } }; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { let body = match this.parse_tuple_struct_body() { Ok(body) => body, @@ -1622,7 +1624,7 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } // No `where` so: `struct Foo;` } else if self.eat(&token::Semi) { @@ -1634,7 +1636,7 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } // Tuple-style struct definition with optional where-clause. } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID); @@ -1663,14 +1665,14 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else if self.token == token::OpenDelim(Delimiter::Brace) { let (fields, recovered) = self.parse_record_struct_body( "union", class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else { let token_str = super::token_descr(&self.token); let msg = format!("expected `where` or `{{` after union name, found {token_str}"); @@ -1687,14 +1689,14 @@ impl<'a> Parser<'a> { adt_ty: &str, ident_span: Span, parsed_where: bool, - ) -> PResult<'a, (ThinVec, /* recovered */ bool)> { + ) -> PResult<'a, (ThinVec, Recovered)> { let mut fields = ThinVec::new(); - let mut recovered = false; + let mut recovered = Recovered::No; if self.eat(&token::OpenDelim(Delimiter::Brace)) { while self.token != token::CloseDelim(Delimiter::Brace) { let field = self.parse_field_def(adt_ty).map_err(|e| { self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No); - recovered = true; + recovered = Recovered::Yes; e }); match field { @@ -2465,8 +2467,8 @@ impl<'a> Parser<'a> { // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't // account for this. match self.expect_one_of(&[], &[]) { - Ok(true) => {} - Ok(false) => unreachable!(), + Ok(Recovered::Yes) => {} + Ok(Recovered::No) => unreachable!(), Err(mut err) => { // Qualifier keywords ordering check enum WrongKw { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 80f6a20b985..27e9fb10a9f 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -358,6 +358,19 @@ pub enum FollowedByType { No, } +/// Whether a function performed recovery +#[derive(Copy, Clone, Debug)] +pub enum Recovered { + No, + Yes, +} + +impl From for bool { + fn from(r: Recovered) -> bool { + matches!(r, Recovered::Yes) + } +} + #[derive(Clone, Copy, PartialEq, Eq)] pub enum TokenDescription { ReservedIdentifier, @@ -456,11 +469,11 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. 
- pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { + pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); - Ok(false) + Ok(Recovered::No) } else { self.unexpected_try_recover(t) } @@ -476,13 +489,13 @@ impl<'a> Parser<'a> { &mut self, edible: &[TokenKind], inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { + ) -> PResult<'a, Recovered> { if edible.contains(&self.token.kind) { self.bump(); - Ok(false) + Ok(Recovered::No) } else if inedible.contains(&self.token.kind) { // leave it in the input - Ok(false) + Ok(Recovered::No) } else if self.token.kind != token::Eof && self.last_unexpected_token_span == Some(self.token.span) { @@ -784,9 +797,9 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, bool /* recovered */)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { let mut first = true; - let mut recovered = false; + let mut recovered = Recovered::No; let mut trailing = false; let mut v = ThinVec::new(); @@ -801,12 +814,12 @@ impl<'a> Parser<'a> { } else { // check for separator match self.expect(t) { - Ok(false) /* not recovered */ => { + Ok(Recovered::No) => { self.current_closure.take(); } - Ok(true) /* recovered */ => { + Ok(Recovered::Yes) => { self.current_closure.take(); - recovered = true; + recovered = Recovered::Yes; break; } Err(mut expect_err) => { @@ -979,7 +992,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, bool /* recovered */)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } @@ -993,7 +1006,7 @@ impl<'a> Parser<'a> { f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec, bool /* trailing */)> { let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; - if !recovered { + if matches!(recovered, Recovered::No) { self.eat(ket); } Ok((val, trailing)) diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 5c2f0967b64..ee02b69c614 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -11,6 +11,7 @@ use crate::errors; use crate::maybe_whole; use crate::errors::MalformedLoopLabel; +use crate::parser::Recovered; use ast::Label; use rustc_ast as ast; use rustc_ast::ptr::P; @@ -661,7 +662,6 @@ impl<'a> Parser<'a> { if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => { // Just check for errors and recover; do not eat semicolon yet. - // `expect_one_of` returns PResult<'a, bool /* recovered */> let expect_result = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]); @@ -669,7 +669,7 @@ impl<'a> Parser<'a> { let replace_with_err = 'break_recover: { match expect_result { // Recover from parser, skip type error to avoid extra errors. - Ok(true) => true, + Ok(Recovered::Yes) => true, Err(e) => { if self.recover_colon_as_semi() { // recover_colon_as_semi has already emitted a nicer error. 
@@ -735,7 +735,7 @@ impl<'a> Parser<'a> { true } - Ok(false) => false, + Ok(Recovered::No) => false, } }; From acb2cee618bca1be9a1d778a525c07d2792f0f91 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:48:23 +0000 Subject: [PATCH 55/92] Add newtype for trailing in parser --- compiler/rustc_parse/src/parser/expr.rs | 4 ++-- compiler/rustc_parse/src/parser/item.rs | 5 +++-- compiler/rustc_parse/src/parser/mod.rs | 22 ++++++++++++++-------- compiler/rustc_parse/src/parser/pat.rs | 16 +++++++++------- compiler/rustc_parse/src/parser/ty.rs | 4 ++-- 5 files changed, 30 insertions(+), 21 deletions(-) diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 081f30ef11a..cb46eb25fa4 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -4,7 +4,7 @@ use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions, - SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken, + SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken, }; use crate::errors; @@ -1561,7 +1561,7 @@ impl<'a> Parser<'a> { return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err)); } }; - let kind = if es.len() == 1 && !trailing_comma { + let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) { // `(e)` is parenthesized `e`. ExprKind::Paren(es.into_iter().next().unwrap()) } else { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 6e9af1e15d8..b9fbf1c5765 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1,7 +1,8 @@ use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ - AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, TrailingToken, + AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing, + TrailingToken, }; use crate::errors::{self, MacroExpandsToAdtField}; use crate::fluent_generated as fluent; @@ -1459,7 +1460,7 @@ impl<'a> Parser<'a> { let (variants, _) = if self.token == TokenKind::Semi { self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span }); self.bump(); - (thin_vec![], false) + (thin_vec![], Trailing::No) } else { self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span)) .map_err(|mut err| { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 27e9fb10a9f..29dd2eeb56a 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -371,6 +371,12 @@ impl From for bool { } } +#[derive(Copy, Clone, Debug)] +pub enum Trailing { + No, + Yes, +} + #[derive(Clone, Copy, PartialEq, Eq)] pub enum TokenDescription { ReservedIdentifier, @@ -797,10 +803,10 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { + ) -> PResult<'a, (ThinVec, Trailing, Recovered)> { let mut first = true; let mut recovered = Recovered::No; - let mut trailing = false; + let mut trailing = Trailing::No; let mut v = ThinVec::new(); while !self.expect_any_with_type(kets, expect) { @@ -914,7 +920,7 @@ impl<'a> Parser<'a> { } } if 
sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) { - trailing = true; + trailing = Trailing::Yes; break; } @@ -992,7 +998,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { + ) -> PResult<'a, (ThinVec, Trailing, Recovered)> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } @@ -1004,7 +1010,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; if matches!(recovered, Recovered::No) { self.eat(ket); @@ -1021,7 +1027,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { self.expect(bra)?; self.parse_seq_to_end(ket, sep, f) } @@ -1033,7 +1039,7 @@ impl<'a> Parser<'a> { &mut self, delim: Delimiter, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { self.parse_unspanned_seq( &token::OpenDelim(delim), &token::CloseDelim(delim), @@ -1048,7 +1054,7 @@ impl<'a> Parser<'a> { fn parse_paren_comma_seq( &mut self, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { self.parse_delim_comma_seq(Delimiter::Parenthesis, f) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 072db24265e..2ede19b11e0 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -1,4 +1,4 @@ -use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken}; +use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, TrailingToken}; use crate::errors::{ self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed, DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt, @@ -696,7 +696,9 @@ impl<'a> Parser<'a> { // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. - Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { + let paren_pattern = + fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest()); + if paren_pattern { let pat = fields.into_iter().next().unwrap(); let close_paren = self.prev_token.span; @@ -714,7 +716,7 @@ impl<'a> Parser<'a> { }, }); - self.parse_pat_range_begin_with(begin.clone(), form)? + self.parse_pat_range_begin_with(begin.clone(), form) } // recover ranges with parentheses around the `(start)..` PatKind::Err(_) @@ -729,15 +731,15 @@ impl<'a> Parser<'a> { }, }); - self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)? + self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form) } // (pat) with optional parentheses - _ => PatKind::Paren(pat), + _ => Ok(PatKind::Paren(pat)), } } else { - PatKind::Tuple(fields) - }) + Ok(PatKind::Tuple(fields)) + } } /// Parse a mutable binding with the `mut` token already eaten. 
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index f79f2a813b2..d65e06494fc 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -1,4 +1,4 @@ -use super::{Parser, PathStyle, TokenType}; +use super::{Parser, PathStyle, TokenType, Trailing}; use crate::errors::{ self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType, @@ -415,7 +415,7 @@ impl<'a> Parser<'a> { Ok(ty) })?; - if ts.len() == 1 && !trailing { + if ts.len() == 1 && matches!(trailing, Trailing::No) { let ty = ts.into_iter().next().unwrap().into_inner(); let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus(); match ty.kind { From cb51c850232d8b98b37c0cde6090392b9f077939 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:50:50 +0000 Subject: [PATCH 56/92] Use `Recovered` more --- .../rustc_parse/src/parser/diagnostics.rs | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index f4e7bb413dd..03a2b47683a 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -1288,7 +1288,7 @@ impl<'a> Parser<'a> { err: &mut ComparisonOperatorsCannotBeChained, inner_op: &Expr, outer_op: &Spanned, - ) -> bool /* advanced the cursor */ { + ) -> Recovered { if let ExprKind::Binary(op, l1, r1) = &inner_op.kind { if let ExprKind::Field(_, ident) = l1.kind && ident.as_str().parse::().is_err() @@ -1296,7 +1296,7 @@ impl<'a> Parser<'a> { { // The parser has encountered `foo.bar Parser<'a> { span: inner_op.span.shrink_to_hi(), middle_term: expr_to_str(r1), }); - false // Keep the current parse behavior, where the AST is `(x < y) < z`. + Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`. } // `x == y < z` (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => { @@ -1329,12 +1329,12 @@ impl<'a> Parser<'a> { left: r1.span.shrink_to_lo(), right: r2.span.shrink_to_hi(), }); - true + Recovered::Yes } Err(expr_err) => { expr_err.cancel(); self.restore_snapshot(snapshot); - false + Recovered::Yes } } } @@ -1349,19 +1349,19 @@ impl<'a> Parser<'a> { left: l1.span.shrink_to_lo(), right: r1.span.shrink_to_hi(), }); - true + Recovered::Yes } Err(expr_err) => { expr_err.cancel(); self.restore_snapshot(snapshot); - false + Recovered::No } } } - _ => false, + _ => Recovered::No, }; } - false + Recovered::No } /// Produces an error if comparison operators are chained (RFC #558). @@ -1489,8 +1489,9 @@ impl<'a> Parser<'a> { // If it looks like a genuine attempt to chain operators (as opposed to a // misformatted turbofish, for instance), suggest a correct form. 
- if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op) - { + let recovered = self + .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); + if matches!(recovered, Recovered::Yes) { self.dcx().emit_err(err); mk_err_expr(self, inner_op.span.to(self.prev_token.span)) } else { @@ -1502,7 +1503,7 @@ impl<'a> Parser<'a> { let recover = self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); self.dcx().emit_err(err); - if recover { + if matches!(recover, Recovered::Yes) { return mk_err_expr(self, inner_op.span.to(self.prev_token.span)); } } From 9bfc46c5d8cafda9b9fe4a5ce38f134edb37cf6f Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:57:43 +0000 Subject: [PATCH 57/92] Add newtype for first input type --- .../src/coherence/orphan.rs | 10 ++++----- .../src/traits/coherence.rs | 21 ++++++++++++++++--- .../rustc_trait_selection/src/traits/mod.rs | 2 +- 3 files changed, 24 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/coherence/orphan.rs b/compiler/rustc_hir_analysis/src/coherence/orphan.rs index 07bbaa1926e..b46a67d08eb 100644 --- a/compiler/rustc_hir_analysis/src/coherence/orphan.rs +++ b/compiler/rustc_hir_analysis/src/coherence/orphan.rs @@ -1,14 +1,13 @@ //! Orphan checker: every impl either implements a trait defined in this //! crate or pertains to a type defined in this crate. +use crate::errors; use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; use rustc_middle::ty::{self, AliasKind, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::def_id::LocalDefId; use rustc_span::Span; -use rustc_trait_selection::traits; - -use crate::errors; +use rustc_trait_selection::traits::{self, IsFirstInputType}; #[instrument(skip(tcx), level = "debug")] pub(crate) fn orphan_check_impl( @@ -288,7 +287,7 @@ fn emit_orphan_check_error<'tcx>( (Vec::new(), Vec::new(), Vec::new(), Vec::new(), Vec::new()); let mut sugg = None; for &(mut ty, is_target_ty) in &tys { - let span = if is_target_ty { + let span = if matches!(is_target_ty, IsFirstInputType::Yes) { // Point at `D` in `impl for C in D` self_ty_span } else { @@ -321,7 +320,8 @@ fn emit_orphan_check_error<'tcx>( } } - let is_foreign = !trait_ref.def_id.is_local() && !is_target_ty; + let is_foreign = + !trait_ref.def_id.is_local() && matches!(is_target_ty, IsFirstInputType::No); match &ty.kind() { ty::Slice(_) => { diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index f663f02f872..0d3169cec14 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -598,9 +598,24 @@ pub fn trait_ref_is_local_or_fundamental<'tcx>( trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, sym::fundamental) } +#[derive(Debug, Copy, Clone)] +pub enum IsFirstInputType { + No, + Yes, +} + +impl From for IsFirstInputType { + fn from(b: bool) -> IsFirstInputType { + match b { + false => IsFirstInputType::No, + true => IsFirstInputType::Yes, + } + } +} + #[derive(Debug)] pub enum OrphanCheckErr<'tcx> { - NonLocalInputType(Vec<(Ty<'tcx>, bool /* Is this the first input type? */)>), + NonLocalInputType(Vec<(Ty<'tcx>, IsFirstInputType)>), UncoveredTy(Ty<'tcx>, Option>), } @@ -751,7 +766,7 @@ struct OrphanChecker<'tcx, F> { /// Ignore orphan check failures and exclusively search for the first /// local type. 
search_first_local_ty: bool, - non_local_tys: Vec<(Ty<'tcx>, bool)>, + non_local_tys: Vec<(Ty<'tcx>, IsFirstInputType)>, } impl<'tcx, F, E> OrphanChecker<'tcx, F> @@ -769,7 +784,7 @@ where } fn found_non_local_ty(&mut self, t: Ty<'tcx>) -> ControlFlow> { - self.non_local_tys.push((t, self.in_self_ty)); + self.non_local_tys.push((t, self.in_self_ty.into())); ControlFlow::Continue(()) } diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs index 9eec60ea06c..32447aca390 100644 --- a/compiler/rustc_trait_selection/src/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/mod.rs @@ -42,7 +42,7 @@ use std::fmt::Debug; use std::ops::ControlFlow; pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls}; -pub use self::coherence::{OrphanCheckErr, OverlapResult}; +pub use self::coherence::{IsFirstInputType, OrphanCheckErr, OverlapResult}; pub use self::engine::{ObligationCtxt, TraitEngineExt}; pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation}; pub use self::normalize::NormalizeExt; From 3377dac31e5990a6fd65660f356806c82a8327c7 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Wed, 14 Feb 2024 00:01:08 +0000 Subject: [PATCH 58/92] Add newtype for signedness in LLVM SIMD --- compiler/rustc_codegen_llvm/src/intrinsic.rs | 55 +++++++++++--------- 1 file changed, 31 insertions(+), 24 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 4415c51acf6..574097e82dc 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -2094,9 +2094,16 @@ fn generic_simd_intrinsic<'ll, 'tcx>( return Ok(args[0].immediate()); } + #[derive(Copy, Clone)] + enum Sign { + Unsigned, + Signed, + } + use Sign::*; + enum Style { Float, - Int(/* is signed? */ bool), + Int(Sign), Unsupported, } @@ -2104,11 +2111,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>( // vectors of pointer-sized integers should've been // disallowed before here, so this unwrap is safe. 
ty::Int(i) => ( - Style::Int(true), + Style::Int(Signed), i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Uint(u) => ( - Style::Int(false), + Style::Int(Unsigned), u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Float(f) => (Style::Float, f.bit_width()), @@ -2116,11 +2123,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>( }; let (out_style, out_width) = match out_elem.kind() { ty::Int(i) => ( - Style::Int(true), + Style::Int(Signed), i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Uint(u) => ( - Style::Int(false), + Style::Int(Unsigned), u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Float(f) => (Style::Float, f.bit_width()), @@ -2128,31 +2135,31 @@ fn generic_simd_intrinsic<'ll, 'tcx>( }; match (in_style, out_style) { - (Style::Int(in_is_signed), Style::Int(_)) => { + (Style::Int(sign), Style::Int(_)) => { return Ok(match in_width.cmp(&out_width) { Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty), Ordering::Equal => args[0].immediate(), - Ordering::Less => { - if in_is_signed { - bx.sext(args[0].immediate(), llret_ty) - } else { - bx.zext(args[0].immediate(), llret_ty) - } - } + Ordering::Less => match sign { + Sign::Signed => bx.sext(args[0].immediate(), llret_ty), + Sign::Unsigned => bx.zext(args[0].immediate(), llret_ty), + }, }); } - (Style::Int(in_is_signed), Style::Float) => { - return Ok(if in_is_signed { - bx.sitofp(args[0].immediate(), llret_ty) - } else { - bx.uitofp(args[0].immediate(), llret_ty) - }); + (Style::Int(Sign::Signed), Style::Float) => { + return Ok(bx.sitofp(args[0].immediate(), llret_ty)); } - (Style::Float, Style::Int(out_is_signed)) => { - return Ok(match (out_is_signed, name == sym::simd_as) { - (false, false) => bx.fptoui(args[0].immediate(), llret_ty), - (true, false) => bx.fptosi(args[0].immediate(), llret_ty), - (_, true) => bx.cast_float_to_int(out_is_signed, args[0].immediate(), llret_ty), + (Style::Int(Sign::Unsigned), Style::Float) => { + return Ok(bx.uitofp(args[0].immediate(), llret_ty)); + } + (Style::Float, Style::Int(sign)) => { + return Ok(match (sign, name == sym::simd_as) { + (Sign::Unsigned, false) => bx.fptoui(args[0].immediate(), llret_ty), + (Sign::Signed, false) => bx.fptosi(args[0].immediate(), llret_ty), + (_, true) => bx.cast_float_to_int( + matches!(sign, Sign::Signed), + args[0].immediate(), + llret_ty, + ), }); } (Style::Float, Style::Float) => { From 06e77397e18a8743087ef3f2b5546a148c0560ec Mon Sep 17 00:00:00 2001 From: clubby789 Date: Wed, 14 Feb 2024 00:04:54 +0000 Subject: [PATCH 59/92] Add newtype for using the prelude in resolution --- compiler/rustc_resolve/src/diagnostics.rs | 2 +- compiler/rustc_resolve/src/ident.rs | 24 ++++++++++++++++------- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index 4b978fefa10..99ce4725432 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -1111,7 +1111,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { suggestions.extend( tmp_suggestions .into_iter() - .filter(|s| use_prelude || this.is_builtin_macro(s.res)), + .filter(|s| use_prelude.into() || this.is_builtin_macro(s.res)), ); } } diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs index 4583f991cab..7e7424be303 100644 --- a/compiler/rustc_resolve/src/ident.rs +++ b/compiler/rustc_resolve/src/ident.rs @@ -23,6 +23,18 @@ use Namespace::*; type 
Visibility = ty::Visibility; +#[derive(Copy, Clone)] +pub enum UsePrelude { + No, + Yes, +} + +impl From for bool { + fn from(up: UsePrelude) -> bool { + matches!(up, UsePrelude::Yes) + } +} + impl<'a, 'tcx> Resolver<'a, 'tcx> { /// A generic scope visitor. /// Visits scopes in order to resolve some identifier in them or perform other actions. @@ -32,12 +44,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { scope_set: ScopeSet<'a>, parent_scope: &ParentScope<'a>, ctxt: SyntaxContext, - mut visitor: impl FnMut( - &mut Self, - Scope<'a>, - /*use_prelude*/ bool, - SyntaxContext, - ) -> Option, + mut visitor: impl FnMut(&mut Self, Scope<'a>, UsePrelude, SyntaxContext) -> Option, ) -> Option { // General principles: // 1. Not controlled (user-defined) names should have higher priority than controlled names @@ -133,6 +140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; if visit { + let use_prelude = if use_prelude { UsePrelude::Yes } else { UsePrelude::No }; if let break_result @ Some(..) = visitor(self, scope, use_prelude, ctxt) { return break_result; } @@ -579,7 +587,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None, ignore_binding, ) { - if use_prelude || this.is_builtin_macro(binding.res()) { + if matches!(use_prelude, UsePrelude::Yes) + || this.is_builtin_macro(binding.res()) + { result = Ok((binding, Flags::MISC_FROM_PRELUDE)); } } From 9dee352da09f53af244ecb651885dd0e62fc594d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Feb 2024 15:55:17 +0100 Subject: [PATCH 60/92] fix: server hanging up on build script task --- crates/rust-analyzer/src/global_state.rs | 20 ++++---------------- crates/rust-analyzer/src/lsp/utils.rs | 1 + crates/rust-analyzer/src/reload.rs | 7 ++----- 3 files changed, 7 insertions(+), 21 deletions(-) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 293807a383b..b2d507491b1 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -301,19 +301,12 @@ impl GlobalState { if let Some(path) = vfs_path.as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.kind()) { - workspace_structure_change = Some(( - path.clone(), - false, - AsRef::::as_ref(&path).ends_with("build.rs"), - )); + workspace_structure_change = Some((path.clone(), false)); } if file.is_created_or_deleted() { has_structure_changes = true; - workspace_structure_change = Some(( - path, - self.crate_graph_file_dependencies.contains(vfs_path), - false, - )); + workspace_structure_change = + Some((path, self.crate_graph_file_dependencies.contains(vfs_path))); } else if path.extension() == Some("rs".as_ref()) { modified_rust_files.push(file.file_id); } @@ -365,16 +358,11 @@ impl GlobalState { // FIXME: ideally we should only trigger a workspace fetch for non-library changes // but something's going wrong with the source root business when we add a new local // crate see https://github.com/rust-lang/rust-analyzer/issues/13029 - if let Some((path, force_crate_graph_reload, build_scripts_touched)) = - workspace_structure_change - { + if let Some((path, force_crate_graph_reload)) = workspace_structure_change { self.fetch_workspaces_queue.request_op( format!("workspace vfs file change: {path}"), force_crate_graph_reload, ); - if build_scripts_touched { - self.fetch_build_data_queue.request_op(format!("build.rs changed: {path}"), ()); - } } } diff --git a/crates/rust-analyzer/src/lsp/utils.rs b/crates/rust-analyzer/src/lsp/utils.rs index 10335cb1453..800c0eee53a 100644 --- 
a/crates/rust-analyzer/src/lsp/utils.rs +++ b/crates/rust-analyzer/src/lsp/utils.rs @@ -134,6 +134,7 @@ impl GlobalState { let token = lsp_types::ProgressToken::String( cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")), ); + tracing::debug!(?token, ?state, "report_progress {message:?}"); let work_done_progress = match state { Progress::Begin => { self.send_request::( diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 5895459d1fc..00494ca5ba0 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -411,10 +411,7 @@ impl GlobalState { if *force_reload_crate_graph { self.recreate_crate_graph(cause); } - if self.build_deps_changed && self.config.run_build_scripts() { - self.build_deps_changed = false; - self.fetch_build_data_queue.request_op("build_deps_changed".to_owned(), ()); - } + // Current build scripts do not match the version of the active // workspace, so there's nothing for us to update. return; @@ -424,7 +421,7 @@ impl GlobalState { // Here, we completely changed the workspace (Cargo.toml edit), so // we don't care about build-script results, they are stale. - // FIXME: can we abort the build scripts here? + // FIXME: can we abort the build scripts here if they are already running? self.workspaces = Arc::new(workspaces); if self.config.run_build_scripts() { From 16b15a203ec888714ffbcc7168a259d18555caae Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Feb 2024 17:02:59 +0100 Subject: [PATCH 61/92] internal: Attempt to add a timeout to rustc-tests --- .../src/handlers/useless_braces.rs | 4 +- crates/ide-diagnostics/src/lib.rs | 20 ++++-- crates/rust-analyzer/src/cli/rustc_tests.rs | 70 +++++++++++++++---- 3 files changed, 71 insertions(+), 23 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/useless_braces.rs b/crates/ide-diagnostics/src/handlers/useless_braces.rs index 863a7ab783e..79bcaa0a9c4 100644 --- a/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -4,7 +4,7 @@ use ide_db::{ source_change::SourceChange, }; use itertools::Itertools; -use syntax::{ast, AstNode, SyntaxNode}; +use syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; use text_edit::TextEdit; use crate::{fix, Diagnostic, DiagnosticCode}; @@ -43,7 +43,7 @@ pub(crate) fn useless_braces( "Unnecessary braces in use statement".to_owned(), FileRange { file_id, range: use_range }, ) - .with_main_node(InFile::new(file_id.into(), node.clone())) + .with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node))) .with_fixes(Some(vec![fix( "remove_braces", "Remove unnecessary braces", diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 4428b8baafb..9f4368b04e7 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -142,7 +142,7 @@ pub struct Diagnostic { pub experimental: bool, pub fixes: Option>, // The node that will be affected by `#[allow]` and similar attributes. 
- pub main_node: Option>, + pub main_node: Option>, } impl Diagnostic { @@ -174,9 +174,8 @@ impl Diagnostic { message: impl Into, node: InFile, ) -> Diagnostic { - let file_id = node.file_id; Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node)) - .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id)))) + .with_main_node(node) } fn experimental(mut self) -> Diagnostic { @@ -184,7 +183,7 @@ impl Diagnostic { self } - fn with_main_node(mut self, main_node: InFile) -> Diagnostic { + fn with_main_node(mut self, main_node: InFile) -> Diagnostic { self.main_node = Some(main_node); self } @@ -394,8 +393,17 @@ pub fn diagnostics( res.push(d) } - let mut diagnostics_of_range = - res.iter_mut().filter_map(|x| Some((x.main_node.clone()?, x))).collect::>(); + let mut diagnostics_of_range = res + .iter_mut() + .filter_map(|it| { + Some(( + it.main_node + .map(|ptr| ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id)))) + .clone()?, + it, + )) + }) + .collect::>(); let mut rustc_stack: FxHashMap> = FxHashMap::default(); let mut clippy_stack: FxHashMap> = FxHashMap::default(); diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index 25f84d770bf..b3b6da1f698 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -1,9 +1,13 @@ //! Run all tests in a project, similar to `cargo test`, but using the mir interpreter. +use std::convert::identity; +use std::thread::Builder; +use std::time::{Duration, Instant}; use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf}; use hir::{Change, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; +use itertools::Either; use profile::StopWatch; use project_model::target_data_layout::RustcDataLayoutConfig; use project_model::{target_data_layout, CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; @@ -100,6 +104,7 @@ impl Tester { } fn test(&mut self, p: PathBuf) { + println!("{}", p.display()); if p.parent().unwrap().file_name().unwrap() == "auxiliary" { // These are not tests return; @@ -132,15 +137,44 @@ impl Tester { self.host.apply_change(change); let diagnostic_config = DiagnosticsConfig::test_sample(); + let res = std::thread::scope(|s| { + let worker = Builder::new() + .stack_size(40 * 1024 * 1024) + .spawn_scoped(s, { + let diagnostic_config = &diagnostic_config; + let main = std::thread::current(); + let analysis = self.host.analysis(); + let root_file = self.root_file; + move || { + let res = std::panic::catch_unwind(move || { + analysis.diagnostics( + diagnostic_config, + ide::AssistResolveStrategy::None, + root_file, + ) + }); + main.unpark(); + res + } + }) + .unwrap(); + + let timeout = Duration::from_secs(5); + let now = Instant::now(); + while now.elapsed() <= timeout && !worker.is_finished() { + std::thread::park_timeout(timeout - now.elapsed()); + } + + if !worker.is_finished() { + // attempt to cancel the worker, won't work for chalk hangs unfortunately + self.host.request_cancellation(); + } + worker.join().and_then(identity) + }); let mut actual = FxHashMap::default(); - let panicked = match std::panic::catch_unwind(|| { - self.host - .analysis() - .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file) - .unwrap() - }) { - Err(e) => Some(e), - Ok(diags) => { + let panicked = match res { + Err(e) => Some(Either::Left(e)), + Ok(Ok(diags)) => { for diag in diags { if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) { 
continue; @@ -152,6 +186,7 @@ impl Tester { } None } + Ok(Err(e)) => Some(Either::Right(e)), }; // Ignore tests with diagnostics that we don't emit. ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k)); @@ -159,14 +194,19 @@ impl Tester { println!("{p:?} IGNORE"); self.ignore_count += 1; } else if let Some(panic) = panicked { - if let Some(msg) = panic - .downcast_ref::() - .map(String::as_str) - .or_else(|| panic.downcast_ref::<&str>().copied()) - { - println!("{msg:?} ") + match panic { + Either::Left(panic) => { + if let Some(msg) = panic + .downcast_ref::() + .map(String::as_str) + .or_else(|| panic.downcast_ref::<&str>().copied()) + { + println!("{msg:?} ") + } + println!("{p:?} PANIC"); + } + Either::Right(_) => println!("{p:?} CANCELLED"), } - println!("PANIC"); self.fail_count += 1; } else if actual == expected { println!("{p:?} PASS"); From 07421c13d48918984bd51fad2cde529ebf23c5ae Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Tue, 20 Feb 2024 14:01:50 -0500 Subject: [PATCH 62/92] fix: Don't add `\` before `{` The LSP snippet grammar only specifies that `$`, `}`, and `\` can be escaped with backslashes, but not `{`. --- crates/rust-analyzer/src/lsp/to_proto.rs | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 4101d476cd3..481ebfefd4e 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -1002,10 +1002,8 @@ fn merge_text_and_snippet_edits( let mut new_text = current_indel.insert; // find which snippet bits need to be escaped - let escape_places = new_text - .rmatch_indices(['\\', '$', '{', '}']) - .map(|(insert, _)| insert) - .collect_vec(); + let escape_places = + new_text.rmatch_indices(['\\', '$', '}']).map(|(insert, _)| insert).collect_vec(); let mut escape_places = escape_places.into_iter().peekable(); let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { @@ -2176,7 +2174,7 @@ fn bar(_: usize) {} character: 0, }, }, - new_text: "\\$${1:ab\\{\\}\\$c\\\\d}ef", + new_text: "\\$${1:ab{\\}\\$c\\\\d}ef", insert_text_format: Some( Snippet, ), @@ -2272,7 +2270,7 @@ struct ProcMacro { character: 5, }, }, - new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), @@ -2336,7 +2334,7 @@ struct P { character: 5, }, }, - new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), @@ -2401,7 +2399,7 @@ struct ProcMacro { character: 5, }, }, - new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), @@ -2466,7 +2464,7 @@ struct P { character: 5, }, }, - new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), From 344a79c17dd321279897cc287313ff38e0fe255e Mon Sep 17 00:00:00 2001 From: Chase Douglas Date: Tue, 20 Feb 2024 16:42:20 -0800 Subject: [PATCH 63/92] Drop RUSTC_BOOTSTRAP env var when building build scripts Some packages (e.g. thiserror) force a recompile if the value of the `RUSTC_BOOTSTRAP` env var changes. 
RA sets the variable to 1 in order to enable rustc / cargo unstable options it uses. This causes flapping recompiles when building outside of RA. As of Cargo 1.75 the `--keep-going` flag is stable. This change uses the flag without `RUSTC_BOOTSTRAP` if the Cargo version is >= 1.75, and drops `--keep-going` otherwise. This fixes build script recompilation. --- crates/project-model/src/build_scripts.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index ab72f1fba09..621b6ca3efa 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -138,7 +138,7 @@ impl WorkspaceBuildScripts { toolchain: &Option, sysroot: Option<&Sysroot>, ) -> io::Result { - const RUST_1_62: Version = Version::new(1, 62, 0); + const RUST_1_75: Version = Version::new(1, 75, 0); let current_dir = match &config.invocation_location { InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { @@ -162,7 +162,7 @@ impl WorkspaceBuildScripts { progress, ) { Ok(WorkspaceBuildScripts { error: Some(error), .. }) - if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) => + if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_75) => { // building build scripts failed, attempt to build with --keep-going so // that we potentially get more build data @@ -172,7 +172,8 @@ impl WorkspaceBuildScripts { &workspace.workspace_root().to_path_buf(), sysroot, )?; - cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); + + cmd.args(["--keep-going"]); let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; res.error = Some(error); Ok(res) From 2826eb51aa60e2e73d7f35408d2ee2727ec36334 Mon Sep 17 00:00:00 2001 From: Chase Douglas Date: Tue, 20 Feb 2024 16:49:07 -0800 Subject: [PATCH 64/92] Don't build dependencies when retrieving target data layout `cargo rustc -- ` first builds dependencies then calls `rustc ` for the current package. Here, we don't want to build dependencies, we just want to call `rustc --print`. An unstable `cargo rustc` `--print` command bypasses building dependencies first. This speeds up execution of this code path and ensures RA doesn't recompile dependencies with the `RUSTC_BOOTSRAP=1` env var flag set. Note that we must pass `-Z unstable-options` twice, first to enable the `cargo` unstable `--print` flag, then later to enable the unstable `rustc` `target-spec-json` print request. 
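A minimal standalone sketch of the invocation the hunk below assembles (using plain std::process::Command in place of rust-analyzer's own command wrapper, and assuming a cargo that understands the unstable `--print` flag):

    use std::process::Command;

    fn main() -> std::io::Result<()> {
        // Roughly equivalent to:
        //   cargo rustc -Z unstable-options --print target-spec-json -- -Z unstable-options
        // The first `-Z unstable-options` is consumed by cargo (to allow `--print`);
        // the second is forwarded to rustc (to allow the unstable target-spec-json output).
        // RUSTC_BOOTSTRAP=1 lets stable toolchains accept the -Z flags.
        let out = Command::new("cargo")
            .args(["rustc", "-Z", "unstable-options", "--print", "target-spec-json"])
            .args(["--", "-Z", "unstable-options"])
            .env("RUSTC_BOOTSTRAP", "1")
            .output()?;
        println!("{}", String::from_utf8_lossy(&out.stdout));
        Ok(())
    }

With `--print`, cargo skips compiling the package's dependencies and only runs the rustc print request, which is the point of this change.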
--- crates/project-model/src/target_data_layout.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index af635dda578..98917351c5e 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -32,7 +32,16 @@ pub fn get( Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) - .args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"]) + .args([ + "rustc", + "-Z", + "unstable-options", + "--print", + "target-spec-json", + "--", + "-Z", + "unstable-options", + ]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { cmd.args(["--target", target]); From 6edbc8d875987b25c7bc64c6c903611841645e5f Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 21 Feb 2024 19:05:45 +0000 Subject: [PATCH 65/92] Prevent cycle in implied predicates computation --- .../src/collect/predicates_of.rs | 30 ++++++++++++++----- .../implied-bounds-cycle.rs | 10 +++++++ .../implied-bounds-cycle.stderr | 17 +++++++++++ 3 files changed, 49 insertions(+), 8 deletions(-) create mode 100644 tests/ui/associated-type-bounds/implied-bounds-cycle.rs create mode 100644 tests/ui/associated-type-bounds/implied-bounds-cycle.stderr diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index 351ac2eb770..f70bb8c4289 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -640,16 +640,30 @@ pub(super) fn implied_predicates_with_filter( // Now require that immediate supertraits are converted, which will, in // turn, reach indirect supertraits, so we detect cycles now instead of - // overflowing during elaboration. - if matches!(filter, PredicateFilter::SelfOnly) { - for &(pred, span) in implied_bounds { - debug!("superbound: {:?}", pred); - if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() - && bound.polarity == ty::ImplPolarity::Positive - { - tcx.at(span).super_predicates_of(bound.def_id()); + // overflowing during elaboration. Same for implied predicates, which + // make sure we walk into associated type bounds. 
+ match filter { + PredicateFilter::SelfOnly => { + for &(pred, span) in implied_bounds { + debug!("superbound: {:?}", pred); + if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() + && bound.polarity == ty::ImplPolarity::Positive + { + tcx.at(span).super_predicates_of(bound.def_id()); + } } } + PredicateFilter::SelfAndAssociatedTypeBounds => { + for &(pred, span) in implied_bounds { + debug!("superbound: {:?}", pred); + if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() + && bound.polarity == ty::ImplPolarity::Positive + { + tcx.at(span).implied_predicates_of(bound.def_id()); + } + } + } + _ => {} } ty::GenericPredicates { parent: None, predicates: implied_bounds } diff --git a/tests/ui/associated-type-bounds/implied-bounds-cycle.rs b/tests/ui/associated-type-bounds/implied-bounds-cycle.rs new file mode 100644 index 00000000000..785d47d4791 --- /dev/null +++ b/tests/ui/associated-type-bounds/implied-bounds-cycle.rs @@ -0,0 +1,10 @@ +#![feature(associated_type_bounds)] + +trait A { + type T; +} + +trait B: A {} +//~^ ERROR cycle detected when computing the implied predicates of `B` + +fn main() {} diff --git a/tests/ui/associated-type-bounds/implied-bounds-cycle.stderr b/tests/ui/associated-type-bounds/implied-bounds-cycle.stderr new file mode 100644 index 00000000000..1c1c64ea5f5 --- /dev/null +++ b/tests/ui/associated-type-bounds/implied-bounds-cycle.stderr @@ -0,0 +1,17 @@ +error[E0391]: cycle detected when computing the implied predicates of `B` + --> $DIR/implied-bounds-cycle.rs:7:15 + | +LL | trait B: A {} + | ^ + | + = note: ...which immediately requires computing the implied predicates of `B` again +note: cycle used when computing normalized predicates of `B` + --> $DIR/implied-bounds-cycle.rs:7:1 + | +LL | trait B: A {} + | ^^^^^^^^^^^^^^^^ + = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0391`. 
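The generic arguments in the new test above were stripped when this patch was flattened (angle brackets are missing throughout the file). Judging from the reported span (line 7, column 15) and the sixteen-character underline in the companion .stderr, the supertrait presumably reads as reconstructed below; treat this as a reconstruction, not a verbatim copy of the test:

    #![feature(associated_type_bounds)]

    trait A {
        type T;
    }

    // The associated type bound `T: B` mentions `B` itself, so computing `B`'s
    // implied predicates has to walk back into `B`. With the new
    // `PredicateFilter::SelfAndAssociatedTypeBounds` arm this is reported as a
    // query cycle (E0391) instead of overflowing later during elaboration.
    trait B: A<T: B> {}
    //~^ ERROR cycle detected when computing the implied predicates of `B`

    fn main() {}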
From 3197aee8b31faa200081143c9d353119400759df Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Thu, 22 Feb 2024 01:08:49 +0300 Subject: [PATCH 66/92] support `no-op` compression profile in rust-installer Signed-off-by: onur-ozkan --- src/tools/rust-installer/src/compression.rs | 11 ++++++++++- src/tools/rust-installer/src/main.rs | 2 +- src/tools/rust-installer/src/tarballer.rs | 4 ++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-installer/src/compression.rs b/src/tools/rust-installer/src/compression.rs index 902b2ec6907..4e840dbfbb4 100644 --- a/src/tools/rust-installer/src/compression.rs +++ b/src/tools/rust-installer/src/compression.rs @@ -1,11 +1,12 @@ use anyhow::{Context, Error}; use flate2::{read::GzDecoder, write::GzEncoder}; use rayon::prelude::*; -use std::{convert::TryFrom, fmt, io::Read, io::Write, path::Path, str::FromStr}; +use std::{fmt, io::Read, io::Write, path::Path, str::FromStr}; use xz2::{read::XzDecoder, write::XzEncoder}; #[derive(Default, Debug, Copy, Clone)] pub enum CompressionProfile { + NoOp, Fast, #[default] Balanced, @@ -20,6 +21,7 @@ impl FromStr for CompressionProfile { "fast" => Self::Fast, "balanced" => Self::Balanced, "best" => Self::Best, + "no-op" => Self::NoOp, other => anyhow::bail!("invalid compression profile: {other}"), }) } @@ -31,6 +33,7 @@ impl fmt::Display for CompressionProfile { CompressionProfile::Fast => f.write_str("fast"), CompressionProfile::Balanced => f.write_str("balanced"), CompressionProfile::Best => f.write_str("best"), + CompressionProfile::NoOp => f.write_str("no-op"), } } } @@ -78,10 +81,16 @@ impl CompressionFormat { CompressionProfile::Fast => flate2::Compression::fast(), CompressionProfile::Balanced => flate2::Compression::new(6), CompressionProfile::Best => flate2::Compression::best(), + CompressionProfile::NoOp => panic!( + "compression profile 'no-op' should not call `CompressionFormat::encode`." + ), }, )), CompressionFormat::Xz => { let encoder = match profile { + CompressionProfile::NoOp => panic!( + "compression profile 'no-op' should not call `CompressionFormat::encode`." + ), CompressionProfile::Fast => { xz2::stream::MtStreamBuilder::new().threads(6).preset(1).encoder().unwrap() } diff --git a/src/tools/rust-installer/src/main.rs b/src/tools/rust-installer/src/main.rs index 99acecdd43c..efb4c5bcb83 100644 --- a/src/tools/rust-installer/src/main.rs +++ b/src/tools/rust-installer/src/main.rs @@ -1,5 +1,5 @@ use anyhow::{Context, Result}; -use clap::{self, Parser}; +use clap::Parser; #[derive(Parser)] struct CommandLine { diff --git a/src/tools/rust-installer/src/tarballer.rs b/src/tools/rust-installer/src/tarballer.rs index 7572dc6dcf8..e5a925b2cbf 100644 --- a/src/tools/rust-installer/src/tarballer.rs +++ b/src/tools/rust-installer/src/tarballer.rs @@ -38,6 +38,10 @@ actor! 
{ impl Tarballer { /// Generates the actual tarballs pub fn run(self) -> Result<()> { + if let CompressionProfile::NoOp = self.compression_profile { + return Ok(()); + } + let tarball_name = self.output.clone() + ".tar"; let encoder = CombinedEncoder::new( self.compression_formats From 94597e85cfd2df40b467f087a12c8f727813b947 Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Thu, 22 Feb 2024 01:09:12 +0300 Subject: [PATCH 67/92] force dist.compression-profile = "no-op" for `x install` Signed-off-by: onur-ozkan --- src/bootstrap/src/utils/tarball.rs | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/bootstrap/src/utils/tarball.rs b/src/bootstrap/src/utils/tarball.rs index 573d923ed8f..a14dfd1ca12 100644 --- a/src/bootstrap/src/utils/tarball.rs +++ b/src/bootstrap/src/utils/tarball.rs @@ -3,8 +3,8 @@ use std::{ process::Command, }; -use crate::core::build_steps::dist::distdir; use crate::core::builder::Builder; +use crate::core::{build_steps::dist::distdir, builder::Kind}; use crate::utils::channel; use crate::utils::helpers::t; @@ -325,7 +325,22 @@ impl<'a> Tarball<'a> { assert!(!formats.is_empty(), "dist.compression-formats can't be empty"); cmd.arg("--compression-formats").arg(formats.join(",")); } - cmd.args(["--compression-profile", &self.builder.config.dist_compression_profile]); + + // For `x install` tarball files aren't needed, so we can speed up the process by not producing them. + let compression_profile = if self.builder.kind == Kind::Install { + self.builder.verbose("Forcing dist.compression-profile = 'no-op' for `x install`."); + // "no-op" indicates that the rust-installer won't produce compressed tarball sources. + "no-op" + } else { + assert!( + self.builder.config.dist_compression_profile != "no-op", + "dist.compression-profile = 'no-op' can only be used for `x install`" + ); + + &self.builder.config.dist_compression_profile + }; + + cmd.args(&["--compression-profile", compression_profile]); self.builder.run(&mut cmd); // Ensure there are no symbolic links in the tarball. 
In particular, From a13ec8d00396ac6f5a3f285f8fcd95a2ab6c8824 Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Thu, 22 Feb 2024 01:33:06 +0300 Subject: [PATCH 68/92] add changelog entry Signed-off-by: onur-ozkan --- src/bootstrap/src/utils/change_tracker.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index b813d82ca6f..9a50ad4437e 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -131,4 +131,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Warning, summary: "The \"codegen\"/\"llvm\" profile has been removed and replaced with \"compiler\", use it instead for the same behavior.", }, + ChangeInfo { + change_id: 118724, + severity: ChangeSeverity::Info, + summary: "`x install` now skips providing tarball sources (under 'build/dist' path) to speed up the installation process.", + }, ]; From f89d17b4269fa5ed589b7a87824382edd0a9eea2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 10:42:30 +0100 Subject: [PATCH 69/92] Remove ops_salsa_runtime_mut, replace it with direct synthetic_write API --- crates/ide-db/src/apply_change.rs | 2 +- crates/salsa/salsa-macros/src/database_storage.rs | 4 ++-- crates/salsa/src/lib.rs | 13 +++++++++---- crates/salsa/src/plumbing.rs | 11 +++++++++-- crates/salsa/tests/incremental/memoized_volatile.rs | 4 ++-- crates/salsa/tests/on_demand_inputs.rs | 4 ++-- crates/salsa/tests/storage_varieties/tests.rs | 4 ++-- 7 files changed, 27 insertions(+), 15 deletions(-) diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 1a214ef0bf5..2b2df144d6d 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -17,7 +17,7 @@ impl RootDatabase { pub fn request_cancellation(&mut self) { let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::request_cancellation").entered(); - self.salsa_runtime_mut().synthetic_write(Durability::LOW); + self.synthetic_write(Durability::LOW); } pub fn apply_change(&mut self, change: Change) { diff --git a/crates/salsa/salsa-macros/src/database_storage.rs b/crates/salsa/salsa-macros/src/database_storage.rs index 0ec75bb043d..223da9b5290 100644 --- a/crates/salsa/salsa-macros/src/database_storage.rs +++ b/crates/salsa/salsa-macros/src/database_storage.rs @@ -154,8 +154,8 @@ pub(crate) fn database(args: TokenStream, input: TokenStream) -> TokenStream { self.#db_storage_field.salsa_runtime() } - fn ops_salsa_runtime_mut(&mut self) -> &mut salsa::Runtime { - self.#db_storage_field.salsa_runtime_mut() + fn synthetic_write(&mut self, durability: salsa::Durability) { + self.#db_storage_field.salsa_runtime_mut().synthetic_write(durability) } fn fmt_index( diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 48b5d633bd6..98b3a48e37c 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -96,11 +96,16 @@ pub trait Database: plumbing::DatabaseOps { self.ops_salsa_runtime() } - /// Gives access to the underlying salsa runtime. + /// A "synthetic write" causes the system to act *as though* some + /// input of durability `durability` has changed. This is mostly + /// useful for profiling scenarios. /// - /// This method should not be overridden by `Database` implementors. - fn salsa_runtime_mut(&mut self) -> &mut Runtime { - self.ops_salsa_runtime_mut() + /// **WARNING:** Just like an ordinary write, this method triggers + /// cancellation. 
If you invoke it while a snapshot exists, it + /// will block until that snapshot is dropped -- if that snapshot + /// is owned by the current thread, this could trigger deadlock. + fn synthetic_write(&mut self, durability: Durability) { + plumbing::DatabaseOps::synthetic_write(self, durability) } } diff --git a/crates/salsa/src/plumbing.rs b/crates/salsa/src/plumbing.rs index 71332e39cad..b8df87fd5e5 100644 --- a/crates/salsa/src/plumbing.rs +++ b/crates/salsa/src/plumbing.rs @@ -38,8 +38,15 @@ pub trait DatabaseOps { /// Gives access to the underlying salsa runtime. fn ops_salsa_runtime(&self) -> &Runtime; - /// Gives access to the underlying salsa runtime. - fn ops_salsa_runtime_mut(&mut self) -> &mut Runtime; + /// A "synthetic write" causes the system to act *as though* some + /// input of durability `durability` has changed. This is mostly + /// useful for profiling scenarios. + /// + /// **WARNING:** Just like an ordinary write, this method triggers + /// cancellation. If you invoke it while a snapshot exists, it + /// will block until that snapshot is dropped -- if that snapshot + /// is owned by the current thread, this could trigger deadlock. + fn synthetic_write(&mut self, durability: Durability); /// Formats a database key index in a human readable fashion. fn fmt_index( diff --git a/crates/salsa/tests/incremental/memoized_volatile.rs b/crates/salsa/tests/incremental/memoized_volatile.rs index 6dc5030063b..3dcc32eece3 100644 --- a/crates/salsa/tests/incremental/memoized_volatile.rs +++ b/crates/salsa/tests/incremental/memoized_volatile.rs @@ -58,7 +58,7 @@ fn revalidate() { // Second generation: volatile will change (to 1) but memoized1 // will not (still 0, as 1/2 = 0) - query.salsa_runtime_mut().synthetic_write(Durability::LOW); + query.synthetic_write(Durability::LOW); query.memoized2(); query.assert_log(&["Volatile invoked", "Memoized1 invoked"]); query.memoized2(); @@ -67,7 +67,7 @@ fn revalidate() { // Third generation: volatile will change (to 2) and memoized1 // will too (to 1). Therefore, after validating that Memoized1 // changed, we now invoke Memoized2. 
- query.salsa_runtime_mut().synthetic_write(Durability::LOW); + query.synthetic_write(Durability::LOW); query.memoized2(); query.assert_log(&["Volatile invoked", "Memoized1 invoked", "Memoized2 invoked"]); diff --git a/crates/salsa/tests/on_demand_inputs.rs b/crates/salsa/tests/on_demand_inputs.rs index 5d0e4866442..677d633ee7c 100644 --- a/crates/salsa/tests/on_demand_inputs.rs +++ b/crates/salsa/tests/on_demand_inputs.rs @@ -111,7 +111,7 @@ fn on_demand_input_durability() { } "#]].assert_debug_eq(&events); - db.salsa_runtime_mut().synthetic_write(Durability::LOW); + db.synthetic_write(Durability::LOW); events.replace(vec![]); assert_eq!(db.c(1), 10); assert_eq!(db.c(2), 20); @@ -128,7 +128,7 @@ fn on_demand_input_durability() { } "#]].assert_debug_eq(&events); - db.salsa_runtime_mut().synthetic_write(Durability::HIGH); + db.synthetic_write(Durability::HIGH); events.replace(vec![]); assert_eq!(db.c(1), 10); assert_eq!(db.c(2), 20); diff --git a/crates/salsa/tests/storage_varieties/tests.rs b/crates/salsa/tests/storage_varieties/tests.rs index f75c7c142fe..8e2f9b03cb9 100644 --- a/crates/salsa/tests/storage_varieties/tests.rs +++ b/crates/salsa/tests/storage_varieties/tests.rs @@ -20,7 +20,7 @@ fn volatile_twice() { let v2 = db.volatile(); // volatiles are cached, so 2nd read returns the same assert_eq!(v1, v2); - db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches + db.synthetic_write(Durability::LOW); // clears volatile caches let v3 = db.volatile(); // will re-increment the counter let v4 = db.volatile(); // second call will be cached @@ -40,7 +40,7 @@ fn intermingled() { assert_eq!(v1, v3); assert_eq!(v2, v4); - db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches + db.synthetic_write(Durability::LOW); // clears volatile caches let v5 = db.memoized(); // re-executes volatile, caches new result let v6 = db.memoized(); // re-use cached result From cdfb73ab9c702be655a0164d79eb0ca0a8942384 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 22:25:55 +0100 Subject: [PATCH 70/92] fix: Fix proc-macro server not accounting for string delimiters correctly --- crates/proc-macro-srv/src/proc_macros.rs | 6 +-- crates/proc-macro-srv/src/server.rs | 9 +++- .../src/server/rust_analyzer_span.rs | 43 ++++++++++-------- crates/proc-macro-srv/src/server/token_id.rs | 45 +++++++++++-------- .../proc-macro-srv/src/server/token_stream.rs | 11 ++--- crates/proc-macro-srv/src/tests/mod.rs | 26 ++++++++--- 6 files changed, 85 insertions(+), 55 deletions(-) diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 3fe968c81ca..686d5b0438a 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -64,7 +64,7 @@ impl ProcMacros { &bridge::server::SameThread, S::make_server(call_site, def_site, mixed_site), parsed_body, - false, + cfg!(debug_assertions), ); return res .map(|it| it.into_subtree(call_site)) @@ -75,7 +75,7 @@ impl ProcMacros { &bridge::server::SameThread, S::make_server(call_site, def_site, mixed_site), parsed_body, - false, + cfg!(debug_assertions), ); return res .map(|it| it.into_subtree(call_site)) @@ -87,7 +87,7 @@ impl ProcMacros { S::make_server(call_site, def_site, mixed_site), parsed_attributes, parsed_body, - false, + cfg!(debug_assertions), ); return res .map(|it| it.into_subtree(call_site)) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index ff8fd295d88..5a814e23e7a 100644 --- 
a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -93,7 +93,14 @@ impl LiteralFormatter { let hashes = get_hashes_str(n); f(&["br", hashes, "\"", symbol, "\"", hashes, suffix]) } - _ => f(&[symbol, suffix]), + bridge::LitKind::CStr => f(&["c\"", symbol, "\"", suffix]), + bridge::LitKind::CStrRaw(n) => { + let hashes = get_hashes_str(n); + f(&["cr", hashes, "\"", symbol, "\"", hashes, suffix]) + } + bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => { + f(&[symbol, suffix]) + } }) } diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index c6a0a666555..e0d708559db 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -97,22 +97,33 @@ impl server::FreeFunctions for RaSpanServer { } let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; - let kind = match kind { - LiteralKind::Int { .. } => LitKind::Integer, - LiteralKind::Float { .. } => LitKind::Float, - LiteralKind::Char { .. } => LitKind::Char, - LiteralKind::Byte { .. } => LitKind::Byte, - LiteralKind::Str { .. } => LitKind::Str, - LiteralKind::ByteStr { .. } => LitKind::ByteStr, - LiteralKind::CStr { .. } => LitKind::CStr, - LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), - LiteralKind::RawByteStr { n_hashes } => { - LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) - } - LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + let (kind, start_offset, end_offset) = match kind { + LiteralKind::Int { .. } => (LitKind::Integer, 0, 0), + LiteralKind::Float { .. } => (LitKind::Float, 0, 0), + LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize), + LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize), + LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize), + LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize), + LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize), + LiteralKind::RawStr { n_hashes } => ( + LitKind::StrRaw(n_hashes.unwrap_or_default()), + 2 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawByteStr { n_hashes } => ( + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawCStr { n_hashes } => ( + LitKind::CStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), }; let (lit, suffix) = s.split_at(suffix_start as usize); + let lit = &lit[start_offset..lit.len() - end_offset]; let suffix = match suffix { "" | "_" => None, suffix => Some(Symbol::intern(self.interner, suffix)), @@ -248,12 +259,8 @@ impl server::TokenStream for RaSpanServer { } tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, &lit.text), - // FIXME: handle suffixes - suffix: None, span: lit.span, + ..server::FreeFunctions::literal_from_str(self, &lit.text).unwrap() }) } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 
7e9d8057ac9..d1622ab026b 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -89,22 +89,34 @@ impl server::FreeFunctions for TokenIdServer { } let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; - let kind = match kind { - LiteralKind::Int { .. } => LitKind::Integer, - LiteralKind::Float { .. } => LitKind::Float, - LiteralKind::Char { .. } => LitKind::Char, - LiteralKind::Byte { .. } => LitKind::Byte, - LiteralKind::Str { .. } => LitKind::Str, - LiteralKind::ByteStr { .. } => LitKind::ByteStr, - LiteralKind::CStr { .. } => LitKind::CStr, - LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), - LiteralKind::RawByteStr { n_hashes } => { - LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) - } - LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + + let (kind, start_offset, end_offset) = match kind { + LiteralKind::Int { .. } => (LitKind::Integer, 0, 0), + LiteralKind::Float { .. } => (LitKind::Float, 0, 0), + LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize), + LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize), + LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize), + LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize), + LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize), + LiteralKind::RawStr { n_hashes } => ( + LitKind::StrRaw(n_hashes.unwrap_or_default()), + 2 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawByteStr { n_hashes } => ( + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawCStr { n_hashes } => ( + LitKind::CStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), }; let (lit, suffix) = s.split_at(suffix_start as usize); + let lit = &lit[start_offset..lit.len() - end_offset]; let suffix = match suffix { "" | "_" => None, suffix => Some(Symbol::intern(self.interner, suffix)), @@ -233,12 +245,9 @@ impl server::TokenStream for TokenIdServer { } tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, &lit.text), - // FIXME: handle suffixes - suffix: None, span: lit.span, + ..server::FreeFunctions::literal_from_str(self, &lit.text) + .unwrap_or_else(|_| panic!("`{}`", lit.text)) }) } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 5edaa720fc7..408db60e872 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -115,8 +115,6 @@ pub(super) mod token_stream { } } - type LexError = String; - /// Attempts to break the string into tokens and parse those tokens into a token stream. /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters /// or characters not existing in the language. @@ -124,13 +122,10 @@ pub(super) mod token_stream { /// /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. 
- #[rustfmt::skip] - impl /*FromStr for*/ TokenStream { - // type Err = LexError; - - pub(crate) fn from_str(src: &str, call_site: S) -> Result, LexError> { + impl TokenStream { + pub(crate) fn from_str(src: &str, call_site: S) -> Result, String> { let subtree = - mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?; + mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?; Ok(TokenStream::with_subtree(subtree)) } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index e5bfe5ee92c..54a20357d26 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -169,7 +169,7 @@ fn test_fn_like_mk_idents() { fn test_fn_like_macro_clone_literals() { assert_expand( "fn_like_clone_tokens", - r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###, + r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###, expect![[r###" SUBTREE $$ 1 1 LITERAL 1u16 1 @@ -181,11 +181,17 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 1 LITERAL 3.14f32 1 PUNCH , [alone] 1 - LITERAL ""hello bridge"" 1 + LITERAL "hello bridge" 1 PUNCH , [alone] 1 - LITERAL ""suffixed""suffix 1 + LITERAL "suffixed"suffix 1 PUNCH , [alone] 1 - LITERAL r##"r##"raw"##"## 1"###]], + LITERAL r##"raw"## 1 + PUNCH , [alone] 1 + LITERAL 'a' 1 + PUNCH , [alone] 1 + LITERAL b'b' 1 + PUNCH , [alone] 1 + LITERAL c"null" 1"###]], expect![[r###" SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } @@ -197,11 +203,17 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "suffixed"suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], + LITERAL r##"raw"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 73..74, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 'a' SpanData { range: 75..78, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 78..79, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL b'b' SpanData { range: 80..84, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 84..85, anchor: 
SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL c"null" SpanData { range: 86..93, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], ); } From efa6948b577f702eda9c85a1adf26ddc327f0261 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 22:32:39 +0100 Subject: [PATCH 71/92] Fix rust-analyzer not enabling rust-analyzer spans on the proc-macro server --- crates/proc-macro-api/src/process.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 12eafcea442..72f95643c8b 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -45,7 +45,7 @@ impl ProcMacroProcessSrv { }) }; let mut srv = create_srv(true)?; - tracing::info!("sending version check"); + tracing::info!("sending proc-macro server version check"); match srv.version_check() { Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new( io::ErrorKind::Other, @@ -55,14 +55,15 @@ impl ProcMacroProcessSrv { ), )), Ok(v) => { - tracing::info!("got version {v}"); + tracing::info!("Proc-macro server version: {v}"); srv = create_srv(false)?; srv.version = v; - if srv.version > RUST_ANALYZER_SPAN_SUPPORT { + if srv.version >= RUST_ANALYZER_SPAN_SUPPORT { if let Ok(mode) = srv.enable_rust_analyzer_spans() { srv.mode = mode; } } + tracing::info!("Proc-macro server span mode: {:?}", srv.mode); Ok(srv) } Err(e) => { From 9b7284dca7690d3ffdecf3c9eb6e88afe07ec01a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 10:10:19 +0100 Subject: [PATCH 72/92] fix: Fix deadlock in recreate_crate_graph <-> file_line_index --- crates/rust-analyzer/src/reload.rs | 44 +++++++++++++++--------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 00494ca5ba0..f6bc032c019 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -522,13 +522,14 @@ impl GlobalState { } fn recreate_crate_graph(&mut self, cause: String) { - { + // crate graph construction relies on these paths, record them so when one of them gets + // deleted or created we trigger a reconstruction of the crate graph + let mut crate_graph_file_dependencies = FxHashSet::default(); + + let (crate_graph, proc_macro_paths, layouts, toolchains) = { // Create crate graph from all the workspaces let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; - // crate graph construction relies on these paths, record them so when one of them gets - // deleted or created we trigger a reconstruction of the crate graph - let mut crate_graph_file_dependencies = FxHashSet::default(); let load = |path: &AbsPath| { let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); @@ -545,25 +546,24 @@ impl GlobalState { } }; - let (crate_graph, proc_macro_paths, layouts, toolchains) = - ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load); - - let mut change = Change::new(); - if self.config.expand_proc_macros() { - change.set_proc_macros( - crate_graph - .iter() - .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) - .collect(), - ); - self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); - } - change.set_crate_graph(crate_graph); - change.set_target_data_layouts(layouts); - change.set_toolchains(toolchains); - self.analysis_host.apply_change(change); - self.crate_graph_file_dependencies = crate_graph_file_dependencies; + 
ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load) + }; + let mut change = Change::new(); + if self.config.expand_proc_macros() { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .collect(), + ); + self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); } + change.set_crate_graph(crate_graph); + change.set_target_data_layouts(layouts); + change.set_toolchains(toolchains); + self.analysis_host.apply_change(change); + self.crate_graph_file_dependencies = crate_graph_file_dependencies; + self.process_changes(); self.reload_flycheck(); } From 6dca7948f771cb2e0d3e1461acb03b2268075f02 Mon Sep 17 00:00:00 2001 From: cui fliter Date: Fri, 23 Feb 2024 18:45:03 +0800 Subject: [PATCH 73/92] remove repetitive words Signed-off-by: cui fliter --- crates/hir-ty/src/db.rs | 2 +- crates/salsa/src/runtime/dependency_graph.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 42313ff52b1..f9e8cff5539 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -90,7 +90,7 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::cycle(crate::lower::ty_recover)] fn ty(&self, def: TyDefId) -> Binders; - /// Returns the type of the value of the given constant, or `None` if the the `ValueTyDefId` is + /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is /// a `StructId` or `EnumVariantId` with a record constructor. #[salsa::invoke(crate::lower::value_ty_query)] fn value_ty(&self, def: ValueTyDefId) -> Option>; diff --git a/crates/salsa/src/runtime/dependency_graph.rs b/crates/salsa/src/runtime/dependency_graph.rs index e41eb280dee..dd223eeeba9 100644 --- a/crates/salsa/src/runtime/dependency_graph.rs +++ b/crates/salsa/src/runtime/dependency_graph.rs @@ -12,7 +12,7 @@ type QueryStack = Vec; #[derive(Debug, Default)] pub(super) struct DependencyGraph { - /// A `(K -> V)` pair in this map indicates that the the runtime + /// A `(K -> V)` pair in this map indicates that the runtime /// `K` is blocked on some query executing in the runtime `V`. /// This encodes a graph that must be acyclic (or else deadlock /// will result). 
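The deadlock fix in PATCH 72/92 above comes down to lock scoping: the `vfs` write guard is now held only inside the block that gathers the crate-graph inputs, and it is dropped before `apply_change` and `process_changes` run, so work that needs the vfs elsewhere (the `file_line_index` path named in the subject) is not left waiting on it. A minimal sketch of that pattern, with made-up types rather than the real `GlobalState`/`Vfs`:

    use std::sync::RwLock;

    struct State {
        // Stand-ins for the vfs and the analysis database in the real code.
        vfs: RwLock<Vec<String>>,
        db: RwLock<Vec<usize>>,
    }

    impl State {
        fn recreate(&self) {
            // Gather everything that needs the vfs inside a narrow block...
            let lengths: Vec<usize> = {
                let vfs = self.vfs.write().unwrap();
                vfs.iter().map(|path| path.len()).collect()
            };
            // ...so the write guard is already dropped here, and applying the
            // result cannot block anyone who needs the vfs in the meantime.
            *self.db.write().unwrap() = lengths;
        }
    }

    fn main() {
        let state = State {
            vfs: RwLock::new(vec!["src/lib.rs".to_owned(), "src/main.rs".to_owned()]),
            db: RwLock::new(Vec::new()),
        };
        state.recreate();
        println!("{:?}", state.db.read().unwrap());
    }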
From cc4d0e1bd1600cb892a25a57bdeeb70ad258c153 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 21:13:52 +0100 Subject: [PATCH 74/92] Optimize salsa some more --- Cargo.lock | 1 + crates/salsa/Cargo.toml | 1 + crates/salsa/salsa-macros/src/query_group.rs | 4 +- crates/salsa/src/derived.rs | 25 ++-- crates/salsa/src/derived/slot.rs | 122 +++++++++++-------- crates/salsa/src/durability.rs | 4 +- crates/salsa/src/input.rs | 53 ++++---- crates/salsa/src/interned.rs | 16 +-- crates/salsa/src/lib.rs | 2 +- crates/salsa/src/plumbing.rs | 4 +- crates/salsa/src/revision.rs | 2 +- crates/salsa/src/runtime.rs | 52 ++++---- crates/salsa/src/runtime/local_state.rs | 7 +- 13 files changed, 146 insertions(+), 147 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7b29d7bb798..3c87291dbad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1709,6 +1709,7 @@ dependencies = [ "dissimilar", "expect-test", "indexmap", + "itertools", "linked-hash-map", "lock_api", "oorandom", diff --git a/crates/salsa/Cargo.toml b/crates/salsa/Cargo.toml index 4ccbc3de846..9eec21f6a15 100644 --- a/crates/salsa/Cargo.toml +++ b/crates/salsa/Cargo.toml @@ -21,6 +21,7 @@ rustc-hash = "1.0" smallvec = "1.0.0" oorandom = "11" triomphe = "0.1.11" +itertools.workspace = true salsa-macros = { version = "0.0.0", path = "salsa-macros" } diff --git a/crates/salsa/salsa-macros/src/query_group.rs b/crates/salsa/salsa-macros/src/query_group.rs index 5d1678ef120..a868d920b66 100644 --- a/crates/salsa/salsa-macros/src/query_group.rs +++ b/crates/salsa/salsa-macros/src/query_group.rs @@ -526,7 +526,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream fmt_ops.extend(quote! { #query_index => { salsa::plumbing::QueryStorageOps::fmt_index( - &*self.#fn_name, db, input, fmt, + &*self.#fn_name, db, input.key_index(), fmt, ) } }); @@ -537,7 +537,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream maybe_changed_ops.extend(quote! 
{ #query_index => { salsa::plumbing::QueryStorageOps::maybe_changed_after( - &*self.#fn_name, db, input, revision + &*self.#fn_name, db, input.key_index(), revision ) } }); diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs index d6316710058..bf532bdccf6 100644 --- a/crates/salsa/src/derived.rs +++ b/crates/salsa/src/derived.rs @@ -102,13 +102,13 @@ where let mut write = self.slot_map.write(); let entry = write.entry(key.clone()); - let key_index = u32::try_from(entry.index()).unwrap(); + let key_index = entry.index() as u32; let database_key_index = DatabaseKeyIndex { group_index: self.group_index, query_index: Q::QUERY_INDEX, key_index, }; - entry.or_insert_with(|| Arc::new(Slot::new(key.clone(), database_key_index))).clone() + entry.or_insert_with(|| Arc::new(Slot::new(database_key_index))).clone() } } @@ -131,34 +131,33 @@ where fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); let slot_map = self.slot_map.read(); - let key = slot_map.get_index(index.key_index as usize).unwrap().0; + let key = slot_map.get_index(index as usize).unwrap().0; write!(fmt, "{}({:?})", Q::QUERY_NAME, key) } fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + index: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); - let slot = self.slot_map.read().get_index(input.key_index as usize).unwrap().1.clone(); - slot.maybe_changed_after(db, revision) + let read = &self.slot_map.read(); + let Some((key, slot)) = read.get_index(index as usize) else { + return false; + }; + slot.maybe_changed_after(db, revision, key) } fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { db.unwind_if_cancelled(); let slot = self.slot(key); - let StampedValue { value, durability, changed_at } = slot.read(db); + let StampedValue { value, durability, changed_at } = slot.read(db, key); if let Some(evicted) = self.lru_list.record_use(&slot) { evicted.evict(); @@ -182,7 +181,7 @@ where C: std::iter::FromIterator>, { let slot_map = self.slot_map.read(); - slot_map.values().filter_map(|slot| slot.as_table_entry()).collect() + slot_map.iter().filter_map(|(key, slot)| slot.as_table_entry(key)).collect() } } diff --git a/crates/salsa/src/derived/slot.rs b/crates/salsa/src/derived/slot.rs index 4fad791a26a..75204c8ff60 100644 --- a/crates/salsa/src/derived/slot.rs +++ b/crates/salsa/src/derived/slot.rs @@ -26,8 +26,8 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - key: Q::Key, - database_key_index: DatabaseKeyIndex, + key_index: u32, + group_index: u16, state: RwLock>, policy: PhantomData, lru_index: LruIndex, @@ -110,10 +110,10 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - pub(super) fn new(key: Q::Key, database_key_index: DatabaseKeyIndex) -> Self { + pub(super) fn new(database_key_index: DatabaseKeyIndex) -> Self { Self { - key, - database_key_index, + key_index: database_key_index.key_index, + group_index: database_key_index.group_index, state: RwLock::new(QueryState::NotComputed), lru_index: LruIndex::default(), policy: PhantomData, @@ -121,10 +121,18 @@ where } pub(super) fn database_key_index(&self) -> DatabaseKeyIndex { - self.database_key_index + DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index: 
self.key_index, + } } - pub(super) fn read(&self, db: &>::DynDb) -> StampedValue { + pub(super) fn read( + &self, + db: &>::DynDb, + key: &Q::Key, + ) -> StampedValue { let runtime = db.salsa_runtime(); // NB: We don't need to worry about people modifying the @@ -147,7 +155,7 @@ where } } - self.read_upgrade(db, revision_now) + self.read_upgrade(db, key, revision_now) } /// Second phase of a read operation: acquires an upgradable-read @@ -157,6 +165,7 @@ where fn read_upgrade( &self, db: &>::DynDb, + key: &Q::Key, revision_now: Revision, ) -> StampedValue { let runtime = db.salsa_runtime(); @@ -186,8 +195,8 @@ where } }; - let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); - let active_query = runtime.push_query(self.database_key_index); + let panic_guard = PanicGuard::new(self, runtime); + let active_query = runtime.push_query(self.database_key_index()); // If we have an old-value, it *may* now be stale, since there // has been a new revision since the last time we checked. So, @@ -200,7 +209,7 @@ where db.salsa_event(Event { runtime_id: runtime.id(), kind: EventKind::DidValidateMemoizedValue { - database_key: self.database_key_index, + database_key: self.database_key_index(), }, }); @@ -210,7 +219,7 @@ where } } - self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo) + self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo, key) } fn execute( @@ -221,22 +230,23 @@ where active_query: ActiveQueryGuard<'_>, panic_guard: PanicGuard<'_, Q, MP>, old_memo: Option>, + key: &Q::Key, ) -> StampedValue { - tracing::info!("{:?}: executing query", self.database_key_index.debug(db)); + tracing::info!("{:?}: executing query", self.database_key_index().debug(db)); db.salsa_event(Event { runtime_id: db.salsa_runtime().id(), - kind: EventKind::WillExecute { database_key: self.database_key_index }, + kind: EventKind::WillExecute { database_key: self.database_key_index() }, }); // Query was not previously executed, or value is potentially // stale, or value is absent. Let's execute! - let value = match Cycle::catch(|| Q::execute(db, self.key.clone())) { + let value = match Cycle::catch(|| Q::execute(db, key.clone())) { Ok(v) => v, Err(cycle) => { tracing::debug!( "{:?}: caught cycle {:?}, have strategy {:?}", - self.database_key_index.debug(db), + self.database_key_index().debug(db), cycle, Q::CYCLE_STRATEGY, ); @@ -248,12 +258,12 @@ where crate::plumbing::CycleRecoveryStrategy::Fallback => { if let Some(c) = active_query.take_cycle() { assert!(c.is(&cycle)); - Q::cycle_fallback(db, &cycle, &self.key) + Q::cycle_fallback(db, &cycle, key) } else { // we are not a participant in this cycle debug_assert!(!cycle .participant_keys() - .any(|k| k == self.database_key_index)); + .any(|k| k == self.database_key_index())); cycle.throw() } } @@ -303,7 +313,7 @@ where }; let memo_value = - if self.should_memoize_value(&self.key) { Some(new_value.value.clone()) } else { None }; + if self.should_memoize_value(key) { Some(new_value.value.clone()) } else { None }; debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,); @@ -395,13 +405,11 @@ where } } - pub(super) fn as_table_entry(&self) -> Option> { + pub(super) fn as_table_entry(&self, key: &Q::Key) -> Option> { match &*self.state.read() { QueryState::NotComputed => None, - QueryState::InProgress { .. } => Some(TableEntry::new(self.key.clone(), None)), - QueryState::Memoized(memo) => { - Some(TableEntry::new(self.key.clone(), memo.value.clone())) - } + QueryState::InProgress { .. 
} => Some(TableEntry::new(key.clone(), None)), + QueryState::Memoized(memo) => Some(TableEntry::new(key.clone(), memo.value.clone())), } } @@ -436,6 +444,7 @@ where &self, db: &>::DynDb, revision: Revision, + key: &Q::Key, ) -> bool { let runtime = db.salsa_runtime(); let revision_now = runtime.current_revision(); @@ -458,7 +467,7 @@ where MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision, MaybeChangedSinceProbeState::Stale(state) => { drop(state); - return self.maybe_changed_after_upgrade(db, revision); + return self.maybe_changed_after_upgrade(db, revision, key); } } } @@ -495,6 +504,7 @@ where &self, db: &>::DynDb, revision: Revision, + key: &Q::Key, ) -> bool { let runtime = db.salsa_runtime(); let revision_now = runtime.current_revision(); @@ -513,7 +523,9 @@ where // If another thread was active, then the cache line is going to be // either verified or cleared out. Just recurse to figure out which. // Note that we don't need an upgradable read. - MaybeChangedSinceProbeState::Retry => return self.maybe_changed_after(db, revision), + MaybeChangedSinceProbeState::Retry => { + return self.maybe_changed_after(db, revision, key) + } MaybeChangedSinceProbeState::Stale(state) => { type RwLockUpgradableReadGuard<'a, T> = @@ -527,8 +539,8 @@ where } }; - let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); - let active_query = runtime.push_query(self.database_key_index); + let panic_guard = PanicGuard::new(self, runtime); + let active_query = runtime.push_query(self.database_key_index()); if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) { let maybe_changed = old_memo.revisions.changed_at > revision; @@ -538,8 +550,15 @@ where // We found that this memoized value may have changed // but we have an old value. We can re-run the code and // actually *check* if it has changed. - let StampedValue { changed_at, .. } = - self.execute(db, runtime, revision_now, active_query, panic_guard, Some(old_memo)); + let StampedValue { changed_at, .. } = self.execute( + db, + runtime, + revision_now, + active_query, + panic_guard, + Some(old_memo), + key, + ); changed_at > revision } else { // We found that inputs to this memoized value may have chanced @@ -560,7 +579,7 @@ where ) { runtime.block_on_or_unwind( db.ops_database(), - self.database_key_index, + self.database_key_index(), other_id, mutex_guard, ) @@ -585,7 +604,6 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - database_key_index: DatabaseKeyIndex, slot: &'me Slot, runtime: &'me Runtime, } @@ -595,12 +613,8 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - fn new( - database_key_index: DatabaseKeyIndex, - slot: &'me Slot, - runtime: &'me Runtime, - ) -> Self { - Self { database_key_index, slot, runtime } + fn new(slot: &'me Slot, runtime: &'me Runtime) -> Self { + Self { slot, runtime } } /// Indicates that we have concluded normally (without panicking). @@ -616,17 +630,18 @@ where /// inserted; if others were blocked, waiting for us to finish, /// then notify them. fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option>) { - let mut write = self.slot.state.write(); + let old_value = { + let mut write = self.slot.state.write(); + match opt_memo { + // Replace the `InProgress` marker that we installed with the new + // memo, thus releasing our unique access to this key. 
+ Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)), - let old_value = match opt_memo { - // Replace the `InProgress` marker that we installed with the new - // memo, thus releasing our unique access to this key. - Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)), - - // We had installed an `InProgress` marker, but we panicked before - // it could be removed. At this point, we therefore "own" unique - // access to our slot, so we can just remove the key. - None => std::mem::replace(&mut *write, QueryState::NotComputed), + // We had installed an `InProgress` marker, but we panicked before + // it could be removed. At this point, we therefore "own" unique + // access to our slot, so we can just remove the key. + None => std::mem::replace(&mut *write, QueryState::NotComputed), + } }; match old_value { @@ -638,7 +653,8 @@ where // acquire a mutex; the mutex will guarantee that all writes // we are interested in are visible. if anyone_waiting.load(Ordering::Relaxed) { - self.runtime.unblock_queries_blocked_on(self.database_key_index, wait_result); + self.runtime + .unblock_queries_blocked_on(self.slot.database_key_index(), wait_result); } } _ => panic!( @@ -692,10 +708,10 @@ where return None; } if self.verify_revisions(db, revision_now, active_query) { - Some(StampedValue { + self.value.clone().map(|value| StampedValue { durability: self.revisions.durability, changed_at: self.revisions.changed_at, - value: self.value.as_ref().unwrap().clone(), + value, }) } else { None @@ -748,7 +764,7 @@ where // input changed *again*. QueryInputs::Tracked { inputs } => { let changed_input = - inputs.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); + inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); if let Some(input) = changed_input { debug!("validate_memoized_value: `{:?}` may have changed", input); @@ -788,7 +804,7 @@ where MP: MemoizationPolicy, { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "{:?}({:?})", Q::default(), self.key) + write!(fmt, "{:?}", Q::default()) } } diff --git a/crates/salsa/src/durability.rs b/crates/salsa/src/durability.rs index 0c82f6345ab..44abae3170f 100644 --- a/crates/salsa/src/durability.rs +++ b/crates/salsa/src/durability.rs @@ -42,9 +42,9 @@ impl Durability { pub(crate) const MAX: Durability = Self::HIGH; /// Number of durability levels. 
- pub(crate) const LEN: usize = 3; + pub(crate) const LEN: usize = Self::MAX.index() + 1; - pub(crate) fn index(self) -> usize { + pub(crate) const fn index(self) -> usize { self.0 as usize } } diff --git a/crates/salsa/src/input.rs b/crates/salsa/src/input.rs index c2539570e0f..922ec5a7752 100644 --- a/crates/salsa/src/input.rs +++ b/crates/salsa/src/input.rs @@ -29,7 +29,7 @@ where } struct Slot { - database_key_index: DatabaseKeyIndex, + key_index: u32, stamped_value: RwLock>, } @@ -54,27 +54,25 @@ where fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); let slot_map = self.slots.read(); - let key = slot_map.get_index(index.key_index as usize).unwrap().0; + let key = slot_map.get_index(index as usize).unwrap().0; write!(fmt, "{}({:?})", Q::QUERY_NAME, key) } fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + index: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); let slots = &self.slots.read(); - let slot = slots.get_index(input.key_index as usize).unwrap().1; + let Some((_, slot)) = slots.get_index(index as usize) else { + return true; + }; debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); @@ -96,7 +94,11 @@ where let StampedValue { value, durability, changed_at } = slot.stamped_value.read().clone(); db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( - slot.database_key_index, + DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index: slot.key_index, + }, durability, changed_at, ); @@ -174,16 +176,8 @@ where } Entry::Vacant(entry) => { - let key_index = u32::try_from(entry.index()).unwrap(); - let database_key_index = DatabaseKeyIndex { - group_index: self.group_index, - query_index: Q::QUERY_INDEX, - key_index, - }; - entry.insert(Slot { - database_key_index, - stamped_value: RwLock::new(stamped_value), - }); + let key_index = entry.index() as u32; + entry.insert(Slot { key_index, stamped_value: RwLock::new(stamped_value) }); None } } @@ -196,7 +190,6 @@ pub struct UnitInputStorage where Q: Query, { - group_index: u16, slot: UnitSlot, } @@ -222,36 +215,32 @@ where fn new(group_index: u16) -> Self { let database_key_index = DatabaseKeyIndex { group_index, query_index: Q::QUERY_INDEX, key_index: 0 }; - UnitInputStorage { - group_index, - slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) }, - } + UnitInputStorage { slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) } } } fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + _index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); write!(fmt, "{}", Q::QUERY_NAME) } fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + _index: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); - let changed_at = self.slot.stamped_value.read().as_ref().unwrap().changed_at; + let Some(value) = 
&*self.slot.stamped_value.read() else { + return true; + }; + let changed_at = value.changed_at; debug!("maybe_changed_after: changed_at = {:?}", changed_at); diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs index 822219f5185..c065e7e2bde 100644 --- a/crates/salsa/src/interned.rs +++ b/crates/salsa/src/interned.rs @@ -265,12 +265,10 @@ where fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); - let intern_id = InternId::from(index.key_index); + let intern_id = InternId::from(index); let slot = self.lookup_value(intern_id); write!(fmt, "{}({:?})", Q::QUERY_NAME, slot.value) } @@ -278,13 +276,11 @@ where fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + input: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); - let intern_id = InternId::from(input.key_index); + let intern_id = InternId::from(input); let slot = self.lookup_value(intern_id); slot.maybe_changed_after(revision) } @@ -388,7 +384,7 @@ where fn fmt_index( &self, db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { let group_storage = @@ -400,7 +396,7 @@ where fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + input: u32, revision: Revision, ) -> bool { let group_storage = diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 98b3a48e37c..fe807598873 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -54,7 +54,7 @@ pub trait Database: plumbing::DatabaseOps { /// runtime. It permits the database to be customized and to /// inject logging or other custom behavior. fn salsa_event(&self, event_fn: Event) { - #![allow(unused_variables)] + _ = event_fn; } /// Starts unwinding the stack if the current revision is cancelled. 
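A small but load-bearing detail earlier in this patch is the `Durability` change: `index` becomes a `const fn`, so `LEN` is derived from `MAX` instead of being hard-coded to 3, which is what lets later hunks replace the `Vec` of revisions with a fixed-size array. A rough standalone sketch of that pattern, using a simplified stand-in rather than the actual salsa type:

    // Simplified stand-in for salsa's Durability; the real type and its
    // constants live in crates/salsa/src/durability.rs, and these three
    // levels are only illustrative.
    #[derive(Clone, Copy)]
    struct Durability(u8);

    impl Durability {
        const LOW: Durability = Durability(0);
        const MEDIUM: Durability = Durability(1);
        const HIGH: Durability = Durability(2);
        const MAX: Durability = Self::HIGH;
        // Because `index` is const, `LEN` tracks `MAX` at compile time and can
        // size arrays, so adding a level cannot leave the storage too short.
        const LEN: usize = Self::MAX.index() + 1;

        const fn index(self) -> usize {
            self.0 as usize
        }
    }

    fn main() {
        // One slot per durability level, e.g. a "last changed" revision.
        let mut last_changed = [0u32; Durability::LEN];
        last_changed[Durability::LOW.index()] = 7;
        last_changed[Durability::MEDIUM.index()] = 42;
        println!("{last_changed:?}");
    }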
diff --git a/crates/salsa/src/plumbing.rs b/crates/salsa/src/plumbing.rs index b8df87fd5e5..1a8ff33b2ef 100644 --- a/crates/salsa/src/plumbing.rs +++ b/crates/salsa/src/plumbing.rs @@ -173,7 +173,7 @@ where fn fmt_index( &self, db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result; @@ -186,7 +186,7 @@ where fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + index: u32, revision: Revision, ) -> bool; // ANCHOR_END:maybe_changed_after diff --git a/crates/salsa/src/revision.rs b/crates/salsa/src/revision.rs index d97aaf9deba..559b0338608 100644 --- a/crates/salsa/src/revision.rs +++ b/crates/salsa/src/revision.rs @@ -46,7 +46,7 @@ pub(crate) struct AtomicRevision { } impl AtomicRevision { - pub(crate) fn start() -> Self { + pub(crate) const fn start() -> Self { Self { data: AtomicU32::new(START) } } diff --git a/crates/salsa/src/runtime.rs b/crates/salsa/src/runtime.rs index 40b8856991f..a7d5a245782 100644 --- a/crates/salsa/src/runtime.rs +++ b/crates/salsa/src/runtime.rs @@ -4,13 +4,14 @@ use crate::hash::FxIndexSet; use crate::plumbing::CycleRecoveryStrategy; use crate::revision::{AtomicRevision, Revision}; use crate::{Cancelled, Cycle, Database, DatabaseKeyIndex, Event, EventKind}; +use itertools::Itertools; use parking_lot::lock_api::{RawRwLock, RawRwLockRecursive}; use parking_lot::{Mutex, RwLock}; use std::hash::Hash; use std::panic::panic_any; -use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::atomic::{AtomicU32, Ordering}; use tracing::debug; -use triomphe::Arc; +use triomphe::{Arc, ThinArc}; mod dependency_graph; use dependency_graph::DependencyGraph; @@ -297,8 +298,7 @@ impl Runtime { // (at least for this execution, not necessarily across executions), // no matter where it started on the stack. Find the minimum // key and rotate it to the front. - let min = v.iter().min().unwrap(); - let index = v.iter().position(|p| p == min).unwrap(); + let index = v.iter().position_min().unwrap_or_default(); v.rotate_left(index); // No need to store extra memory. @@ -440,7 +440,7 @@ impl Runtime { /// State that will be common to all threads (when we support multiple threads) struct SharedState { /// Stores the next id to use for a snapshotted runtime (starts at 1). - next_id: AtomicUsize, + next_id: AtomicU32, /// Whenever derived queries are executing, they acquire this lock /// in read mode. Mutating inputs (and thus creating a new @@ -457,50 +457,46 @@ struct SharedState { /// revision is cancelled). pending_revision: AtomicRevision, - /// Stores the "last change" revision for values of each duration. + /// Stores the "last change" revision for values of each Durability. /// This vector is always of length at least 1 (for Durability 0) - /// but its total length depends on the number of durations. The + /// but its total length depends on the number of Durabilities. The /// element at index 0 is special as it represents the "current /// revision". In general, we have the invariant that revisions /// in here are *declining* -- that is, `revisions[i] >= /// revisions[i + 1]`, for all `i`. This is because when you /// modify a value with durability D, that implies that values /// with durability less than D may have changed too. - revisions: Vec, + revisions: [AtomicRevision; Durability::LEN], /// The dependency graph tracks which runtimes are blocked on one /// another, waiting for queries to terminate. 
dependency_graph: Mutex, } -impl SharedState { - fn with_durabilities(durabilities: usize) -> Self { +impl std::panic::RefUnwindSafe for SharedState {} + +impl Default for SharedState { + fn default() -> Self { + #[allow(clippy::declare_interior_mutable_const)] + const START: AtomicRevision = AtomicRevision::start(); SharedState { - next_id: AtomicUsize::new(1), + next_id: AtomicU32::new(1), query_lock: Default::default(), - revisions: (0..durabilities).map(|_| AtomicRevision::start()).collect(), - pending_revision: AtomicRevision::start(), + revisions: [START; Durability::LEN], + pending_revision: START, dependency_graph: Default::default(), } } } -impl std::panic::RefUnwindSafe for SharedState {} - -impl Default for SharedState { - fn default() -> Self { - Self::with_durabilities(Durability::LEN) - } -} - impl std::fmt::Debug for SharedState { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let query_lock = if self.query_lock.try_write().is_some() { - "" - } else if self.query_lock.try_read().is_some() { + let query_lock = if self.query_lock.is_locked_exclusive() { + "" + } else if self.query_lock.is_locked() { "" } else { - "" + "" }; fmt.debug_struct("SharedState") .field("query_lock", &query_lock) @@ -570,7 +566,9 @@ impl ActiveQuery { if dependencies.is_empty() { QueryInputs::NoInputs } else { - QueryInputs::Tracked { inputs: dependencies.iter().copied().collect() } + QueryInputs::Tracked { + inputs: ThinArc::from_header_and_iter((), dependencies.iter().copied()), + } } } }; @@ -616,7 +614,7 @@ impl ActiveQuery { /// complete, its `RuntimeId` may potentially be re-used. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct RuntimeId { - counter: usize, + counter: u32, } #[derive(Clone, Debug)] diff --git a/crates/salsa/src/runtime/local_state.rs b/crates/salsa/src/runtime/local_state.rs index 91b95dffe78..7ac21dec1a8 100644 --- a/crates/salsa/src/runtime/local_state.rs +++ b/crates/salsa/src/runtime/local_state.rs @@ -1,5 +1,6 @@ //! use tracing::debug; +use triomphe::ThinArc; use crate::durability::Durability; use crate::runtime::ActiveQuery; @@ -7,7 +8,6 @@ use crate::runtime::Revision; use crate::Cycle; use crate::DatabaseKeyIndex; use std::cell::RefCell; -use triomphe::Arc; /// State that is specific to a single execution thread. /// @@ -43,7 +43,7 @@ pub(crate) struct QueryRevisions { #[derive(Debug, Clone)] pub(crate) enum QueryInputs { /// Non-empty set of inputs, fully known - Tracked { inputs: Arc<[DatabaseKeyIndex]> }, + Tracked { inputs: ThinArc<(), DatabaseKeyIndex> }, /// Empty set of inputs, fully known. NoInputs, @@ -145,8 +145,7 @@ impl LocalState { /// the current thread is blocking. The stack must be restored /// with [`Self::restore_query_stack`] when the thread unblocks. 
pub(super) fn take_query_stack(&self) -> Vec { - assert!(self.query_stack.borrow().is_some(), "query stack already taken"); - self.query_stack.take().unwrap() + self.query_stack.take().expect("query stack already taken") } /// Restores a query stack taken with [`Self::take_query_stack`] once From e656844833e0e71416a84bf692240a7a8b809e0a Mon Sep 17 00:00:00 2001 From: NikitaShyrei Date: Fri, 23 Feb 2024 16:35:07 +0100 Subject: [PATCH 75/92] moved tests file --- library/std/src/sys/{pal/sgx/rwlock => locks/rwlock/sgx}/tests.rs | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename library/std/src/sys/{pal/sgx/rwlock => locks/rwlock/sgx}/tests.rs (100%) diff --git a/library/std/src/sys/pal/sgx/rwlock/tests.rs b/library/std/src/sys/locks/rwlock/sgx/tests.rs similarity index 100% rename from library/std/src/sys/pal/sgx/rwlock/tests.rs rename to library/std/src/sys/locks/rwlock/sgx/tests.rs From c6a6e63a458064e8f7cdbe6f5992a195274eab0a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 17:24:29 +0100 Subject: [PATCH 76/92] internal: Pin commit of rust-lang/rust for rustc-test metrics --- crates/rust-analyzer/src/cli/rustc_tests.rs | 1 + xtask/src/metrics.rs | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index b3b6da1f698..7062b60cbfc 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -276,6 +276,7 @@ impl flags::RustcTests { pub fn run(self) -> Result<()> { let mut tester = Tester::new()?; let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui")); + eprintln!("Running tests for tests/ui"); for i in walk_dir { let i = i?; let p = i.into_path(); diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 2efafa10a82..285abb9efcb 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -86,7 +86,11 @@ impl Metrics { fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> { eprintln!("\nMeasuring rustc tests"); - cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?; + cmd!( + sh, + "git clone --depth=1 --branch 1.76.0 https://github.com/rust-lang/rust.git --single-branch" + ) + .run()?; let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?; for (metric, value, unit) in parse_metrics(&output) { From d9a08624aad55a91f839e6ee3acf7117d197cda9 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 19:31:53 +0100 Subject: [PATCH 77/92] internal: Disable rustc test metrics --- .github/workflows/metrics.yaml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index be9f504e599..87a1bd53a5c 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -67,7 +67,7 @@ jobs: other_metrics: strategy: matrix: - names: [self, rustc_tests, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] + names: [self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] runs-on: ubuntu-latest needs: [setup_cargo, build_metrics] @@ -118,11 +118,6 @@ jobs: with: name: self-${{ github.sha }} - - name: Download rustc_tests metrics - uses: actions/download-artifact@v3 - with: - name: rustc_tests-${{ github.sha }} - - name: Download ripgrep-13.0.0 metrics uses: actions/download-artifact@v3 with: @@ -151,7 +146,7 @@ jobs: chmod 700 ~/.ssh git clone --depth 1 git@github.com:rust-analyzer/metrics.git - jq -s ".[0] * .[1] * .[2] * .[3] * 
.[4] * .[5] * .[6]" build.json self.json rustc_tests.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json + jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json cd metrics git add . git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 From 0c3a524acbbaa1ddf6e3dc014bd51da570db7c79 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 20:22:01 +0100 Subject: [PATCH 78/92] Fix: Fix metrics CI failing --- .github/workflows/metrics.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index 87a1bd53a5c..de61b2389ae 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -146,7 +146,7 @@ jobs: chmod 700 ~/.ssh git clone --depth 1 git@github.com:rust-analyzer/metrics.git - jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json + jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json cd metrics git add . git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 From 64779737db21f9e506f4562d9923e7037e15ecfb Mon Sep 17 00:00:00 2001 From: David Barsky Date: Fri, 23 Feb 2024 15:34:23 -0500 Subject: [PATCH 79/92] internal: fix deadlock introduced by #16643 --- crates/salsa/src/derived.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs index bf532bdccf6..153df999f53 100644 --- a/crates/salsa/src/derived.rs +++ b/crates/salsa/src/derived.rs @@ -146,11 +146,14 @@ where revision: Revision, ) -> bool { debug_assert!(revision < db.salsa_runtime().current_revision()); - let read = &self.slot_map.read(); + let read = self.slot_map.read(); let Some((key, slot)) = read.get_index(index as usize) else { return false; }; - slot.maybe_changed_after(db, revision, key) + let (key, slot) = (key.clone(), slot.clone()); + // note: this drop is load-bearing. removing it would cause deadlocks. + drop(read); + slot.maybe_changed_after(db, revision, &key) } fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { From 30429f8ece19d701cff8f5c547c2069fbf651cfd Mon Sep 17 00:00:00 2001 From: Graeme Read Date: Sat, 24 Feb 2024 06:45:00 +0000 Subject: [PATCH 80/92] feat: Add short flag -V for consistency with other rust tooling --- crates/rust-analyzer/src/cli/flags.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 493e614dce6..3f68c5d053b 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -30,7 +30,7 @@ xflags::xflags! { default cmd lsp-server { /// Print version. - optional -V, --version + optional -V, --version /// Dump a LSP config JSON schema.
optional --print-config-schema From 93ec0e6299e31e6857e8ad741750034f35762b11 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Thu, 4 Jan 2024 19:06:37 +0000 Subject: [PATCH 81/92] Stabilize `cfg_target_abi` --- compiler/rustc_feature/src/accepted.rs | 2 + compiler/rustc_feature/src/builtin_attrs.rs | 1 - compiler/rustc_feature/src/unstable.rs | 2 - .../src/spec/targets/armv6_unknown_freebsd.rs | 3 +- .../targets/armv6_unknown_netbsd_eabihf.rs | 2 - .../src/spec/targets/armv7_unknown_freebsd.rs | 3 +- .../targets/armv7_unknown_netbsd_eabihf.rs | 2 - library/unwind/src/lib.rs | 2 +- tests/ui/cfg/cfg-target-abi.rs | 1 - tests/ui/check-cfg/well-known-values.rs | 1 - tests/ui/check-cfg/well-known-values.stderr | 56 +++++++++---------- .../feature-gate-cfg-target-abi.rs | 13 ----- .../feature-gate-cfg-target-abi.stderr | 43 -------------- 13 files changed, 33 insertions(+), 98 deletions(-) delete mode 100644 tests/ui/feature-gates/feature-gate-cfg-target-abi.rs delete mode 100644 tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 18f6ae35054..1b2993dabdb 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -84,6 +84,8 @@ declare_features! ( (accepted, cfg_doctest, "1.40.0", Some(62210)), /// Enables `#[cfg(panic = "...")]` config key. (accepted, cfg_panic, "1.60.0", Some(77443)), + /// Allows `cfg(target_abi = "...")`. + (accepted, cfg_target_abi, "CURRENT_RUSTC_VERSION", Some(80970)), /// Allows `cfg(target_feature = "...")`. (accepted, cfg_target_feature, "1.27.0", Some(29717)), /// Allows `cfg(target_vendor = "...")`. diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 99875ec5405..5b4221a9f40 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -25,7 +25,6 @@ pub type GatedCfg = (Symbol, Symbol, GateFn); const GATED_CFGS: &[GatedCfg] = &[ // (name in cfg, feature, function to check if the feature is enabled) (sym::overflow_checks, sym::cfg_overflow_checks, cfg_fn!(cfg_overflow_checks)), - (sym::target_abi, sym::cfg_target_abi, cfg_fn!(cfg_target_abi)), (sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)), ( sym::target_has_atomic_equal_alignment, diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 93c183a65ef..8eea4b7d8a8 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -373,8 +373,6 @@ declare_features! ( (unstable, cfg_sanitize, "1.41.0", Some(39699)), /// Allows `cfg(sanitizer_cfi_generalize_pointers)` and `cfg(sanitizer_cfi_normalize_integers)`. (unstable, cfg_sanitizer_cfi, "1.77.0", Some(89653)), - /// Allows `cfg(target_abi = "...")`. - (unstable, cfg_target_abi, "1.55.0", Some(80970)), /// Allows `cfg(target(abi = "..."))`. (unstable, cfg_target_compact, "1.63.0", Some(96901)), /// Allows `cfg(target_has_atomic_load_store = "...")`. 
diff --git a/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs b/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs index 22f6ee81055..70e40f60f22 100644 --- a/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs @@ -8,8 +8,7 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), + env: "gnu".into(), features: "+v6,+vfp2,-d32".into(), max_atomic_width: Some(64), mcount: "\u{1}__gnu_mcount_nc".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs b/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs index 84d9ceac04d..ca0db5e5640 100644 --- a/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs +++ b/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs @@ -8,8 +8,6 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), features: "+v6,+vfp2,-d32".into(), max_atomic_width: Some(64), mcount: "__mcount".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs index 9f4a432c6fa..61b6d7a63e3 100644 --- a/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs @@ -8,8 +8,7 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), + env: "gnu".into(), features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), max_atomic_width: Some(64), mcount: "\u{1}__gnu_mcount_nc".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs index e5518c6daec..7afdb87b62e 100644 --- a/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs +++ b/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs @@ -8,8 +8,6 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), max_atomic_width: Some(64), mcount: "__mcount".into(), diff --git a/library/unwind/src/lib.rs b/library/unwind/src/lib.rs index f5988a4df13..a64f2904633 100644 --- a/library/unwind/src/lib.rs +++ b/library/unwind/src/lib.rs @@ -3,7 +3,7 @@ #![feature(link_cfg)] #![feature(staged_api)] #![feature(c_unwind)] -#![feature(cfg_target_abi)] +#![cfg_attr(bootstrap, feature(cfg_target_abi))] #![feature(strict_provenance)] #![cfg_attr(not(target_env = "msvc"), feature(libc))] #![allow(internal_features)] diff --git a/tests/ui/cfg/cfg-target-abi.rs b/tests/ui/cfg/cfg-target-abi.rs index 5d13337c1c3..306ae077325 100644 --- a/tests/ui/cfg/cfg-target-abi.rs +++ b/tests/ui/cfg/cfg-target-abi.rs @@ -1,5 +1,4 @@ //@ run-pass -#![feature(cfg_target_abi)] #[cfg(target_abi = "eabihf")] pub fn main() { diff --git a/tests/ui/check-cfg/well-known-values.rs b/tests/ui/check-cfg/well-known-values.rs index 0c55e35a993..859a36c604c 100644 --- a/tests/ui/check-cfg/well-known-values.rs +++ b/tests/ui/check-cfg/well-known-values.rs @@ -10,7 
+10,6 @@ #![feature(cfg_overflow_checks)] #![feature(cfg_relocation_model)] #![feature(cfg_sanitize)] -#![feature(cfg_target_abi)] #![feature(cfg_target_has_atomic)] #![feature(cfg_target_has_atomic_equal_alignment)] #![feature(cfg_target_thread_local)] diff --git a/tests/ui/check-cfg/well-known-values.stderr b/tests/ui/check-cfg/well-known-values.stderr index 6535cd9a1a1..5f52421fef5 100644 --- a/tests/ui/check-cfg/well-known-values.stderr +++ b/tests/ui/check-cfg/well-known-values.stderr @@ -1,5 +1,5 @@ warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:26:5 + --> $DIR/well-known-values.rs:25:5 | LL | clippy = "_UNEXPECTED_VALUE", | ^^^^^^---------------------- @@ -11,7 +11,7 @@ LL | clippy = "_UNEXPECTED_VALUE", = note: `#[warn(unexpected_cfgs)]` on by default warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:28:5 + --> $DIR/well-known-values.rs:27:5 | LL | debug_assertions = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^---------------------- @@ -22,7 +22,7 @@ LL | debug_assertions = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:30:5 + --> $DIR/well-known-values.rs:29:5 | LL | doc = "_UNEXPECTED_VALUE", | ^^^---------------------- @@ -33,7 +33,7 @@ LL | doc = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:32:5 + --> $DIR/well-known-values.rs:31:5 | LL | doctest = "_UNEXPECTED_VALUE", | ^^^^^^^---------------------- @@ -44,7 +44,7 @@ LL | doctest = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:34:5 + --> $DIR/well-known-values.rs:33:5 | LL | miri = "_UNEXPECTED_VALUE", | ^^^^---------------------- @@ -55,7 +55,7 @@ LL | miri = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:36:5 + --> $DIR/well-known-values.rs:35:5 | LL | overflow_checks = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^---------------------- @@ -66,7 +66,7 @@ LL | overflow_checks = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:38:5 + --> $DIR/well-known-values.rs:37:5 | LL | panic = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -75,7 +75,7 @@ LL | panic = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:40:5 + --> $DIR/well-known-values.rs:39:5 | LL | proc_macro = "_UNEXPECTED_VALUE", | ^^^^^^^^^^---------------------- @@ -86,7 +86,7 @@ LL | proc_macro = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:42:5 + --> $DIR/well-known-values.rs:41:5 | LL | relocation_model = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -95,7 +95,7 @@ LL | relocation_model = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional 
configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:44:5 + --> $DIR/well-known-values.rs:43:5 | LL | sanitize = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -104,7 +104,7 @@ LL | sanitize = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:46:5 + --> $DIR/well-known-values.rs:45:5 | LL | target_abi = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -113,7 +113,7 @@ LL | target_abi = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:48:5 + --> $DIR/well-known-values.rs:47:5 | LL | target_arch = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -122,7 +122,7 @@ LL | target_arch = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:50:5 + --> $DIR/well-known-values.rs:49:5 | LL | target_endian = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -131,16 +131,16 @@ LL | target_endian = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:52:5 + --> $DIR/well-known-values.rs:51:5 | LL | target_env = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: expected values for `target_env` are: ``, `eabihf`, `gnu`, `gnueabihf`, `msvc`, `musl`, `newlib`, `nto70`, `nto71`, `ohos`, `psx`, `relibc`, `sgx`, `uclibc` + = note: expected values for `target_env` are: ``, `gnu`, `msvc`, `musl`, `newlib`, `nto70`, `nto71`, `ohos`, `psx`, `relibc`, `sgx`, `uclibc` = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:54:5 + --> $DIR/well-known-values.rs:53:5 | LL | target_family = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -149,7 +149,7 @@ LL | target_family = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:56:5 + --> $DIR/well-known-values.rs:55:5 | LL | target_feature = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -158,7 +158,7 @@ LL | target_feature = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:58:5 + --> $DIR/well-known-values.rs:57:5 | LL | target_has_atomic = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -167,7 +167,7 @@ LL | target_has_atomic = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:60:5 + --> $DIR/well-known-values.rs:59:5 | LL | target_has_atomic_equal_alignment = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -176,7 +176,7 @@ LL | target_has_atomic_equal_alignment = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: 
`_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:62:5 + --> $DIR/well-known-values.rs:61:5 | LL | target_has_atomic_load_store = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -185,7 +185,7 @@ LL | target_has_atomic_load_store = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:64:5 + --> $DIR/well-known-values.rs:63:5 | LL | target_os = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -194,7 +194,7 @@ LL | target_os = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:66:5 + --> $DIR/well-known-values.rs:65:5 | LL | target_pointer_width = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -203,7 +203,7 @@ LL | target_pointer_width = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:68:5 + --> $DIR/well-known-values.rs:67:5 | LL | target_thread_local = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^---------------------- @@ -214,7 +214,7 @@ LL | target_thread_local = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:70:5 + --> $DIR/well-known-values.rs:69:5 | LL | target_vendor = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -223,7 +223,7 @@ LL | target_vendor = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:72:5 + --> $DIR/well-known-values.rs:71:5 | LL | test = "_UNEXPECTED_VALUE", | ^^^^---------------------- @@ -234,7 +234,7 @@ LL | test = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:74:5 + --> $DIR/well-known-values.rs:73:5 | LL | unix = "_UNEXPECTED_VALUE", | ^^^^---------------------- @@ -245,7 +245,7 @@ LL | unix = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:76:5 + --> $DIR/well-known-values.rs:75:5 | LL | windows = "_UNEXPECTED_VALUE", | ^^^^^^^---------------------- @@ -256,7 +256,7 @@ LL | windows = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `linuz` - --> $DIR/well-known-values.rs:82:7 + --> $DIR/well-known-values.rs:81:7 | LL | #[cfg(target_os = "linuz")] // testing that we suggest `linux` | ^^^^^^^^^^^^------- diff --git a/tests/ui/feature-gates/feature-gate-cfg-target-abi.rs b/tests/ui/feature-gates/feature-gate-cfg-target-abi.rs deleted file mode 100644 index d005dc3ad45..00000000000 --- a/tests/ui/feature-gates/feature-gate-cfg-target-abi.rs +++ /dev/null @@ -1,13 +0,0 @@ -#[cfg(target_abi = "x")] //~ ERROR `cfg(target_abi)` is experimental -struct Foo(u64, u64); - -#[cfg_attr(target_abi = "x", x)] //~ ERROR `cfg(target_abi)` is experimental -struct Bar(u64, u64); - -#[cfg(not(any(all(target_abi = "x"))))] //~ ERROR `cfg(target_abi)` is 
experimental -fn foo() {} - -fn main() { - cfg!(target_abi = "x"); - //~^ ERROR `cfg(target_abi)` is experimental and subject to change -} diff --git a/tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr b/tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr deleted file mode 100644 index 4829f8572cc..00000000000 --- a/tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr +++ /dev/null @@ -1,43 +0,0 @@ -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:1:7 - | -LL | #[cfg(target_abi = "x")] - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:4:12 - | -LL | #[cfg_attr(target_abi = "x", x)] - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:7:19 - | -LL | #[cfg(not(any(all(target_abi = "x"))))] - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:11:10 - | -LL | cfg!(target_abi = "x"); - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error: aborting due to 4 previous errors - -For more information about this error, try `rustc --explain E0658`. From 2401ae1896bf47d0c393dd21596817cf83153320 Mon Sep 17 00:00:00 2001 From: Nilstrieb <48135649+Nilstrieb@users.noreply.github.com> Date: Sat, 24 Feb 2024 22:44:21 +0100 Subject: [PATCH 82/92] Make most bootstrap step types !Copy This makes all bootstrap types except for `Compiler` and `TargetSelection` `!Copy`. This makes it easier to modify them by adding !Copy types in the future and comes at no cost of code clarity, the impls were completely unused. 
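A toy example makes the trade-off above concrete: once `Copy` is gone, a step struct can later grow a non-`Copy` field without touching every use site, and the few callers that genuinely need a duplicate just write an explicit `.clone()`. Invented types, not the real bootstrap `Step` machinery:

    // Hypothetical step struct in the style of the bootstrap ones below; with
    // #[derive(Copy)] a String field would not compile at all.
    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
    struct Docs {
        host: String,
    }

    fn run(step: Docs) {
        println!("running {step:?}");
    }

    fn main() {
        let docs = Docs { host: "x86_64-unknown-linux-gnu".to_owned() };
        run(docs.clone()); // explicit clone where Copy would have been implicit
        run(docs);
    }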
--- src/bootstrap/src/core/build_steps/check.rs | 4 +- src/bootstrap/src/core/build_steps/clean.rs | 2 +- src/bootstrap/src/core/build_steps/compile.rs | 6 +- src/bootstrap/src/core/build_steps/dist.rs | 38 +++++------ src/bootstrap/src/core/build_steps/doc.rs | 20 +++--- src/bootstrap/src/core/build_steps/install.rs | 4 +- src/bootstrap/src/core/build_steps/llvm.rs | 10 +-- src/bootstrap/src/core/build_steps/run.rs | 18 ++--- src/bootstrap/src/core/build_steps/setup.rs | 6 +- .../src/core/build_steps/synthetic_targets.rs | 2 +- src/bootstrap/src/core/build_steps/test.rs | 66 +++++++++---------- src/bootstrap/src/core/build_steps/tool.rs | 20 +++--- src/bootstrap/src/core/builder.rs | 2 +- 13 files changed, 99 insertions(+), 99 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index 3ac60f15ef6..f6f4253a364 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -367,7 +367,7 @@ impl Step for CodegenBackend { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustAnalyzer { pub target: TargetSelection, } @@ -441,7 +441,7 @@ impl Step for RustAnalyzer { macro_rules! tool_check_step { ($name:ident, $path:literal, $($alias:literal, )* $source_type:path $(, $default:literal )?) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { pub target: TargetSelection, } diff --git a/src/bootstrap/src/core/build_steps/clean.rs b/src/bootstrap/src/core/build_steps/clean.rs index 4b993945f19..17ca92f25a9 100644 --- a/src/bootstrap/src/core/build_steps/clean.rs +++ b/src/bootstrap/src/core/build_steps/clean.rs @@ -14,7 +14,7 @@ use crate::utils::cache::Interned; use crate::utils::helpers::t; use crate::{Build, Compiler, Mode, Subcommand}; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CleanAll {} impl Step for CleanAll { diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index d349cd67fed..93881304421 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -727,7 +727,7 @@ fn apple_darwin_sign_file(file_path: &Path) { assert!(status.success()); } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct StartupObjects { pub compiler: Compiler, pub target: TargetSelection, @@ -1491,7 +1491,7 @@ pub fn compiler_file( PathBuf::from(out.trim()) } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Sysroot { pub compiler: Compiler, /// See [`Std::force_recompile`]. @@ -1653,7 +1653,7 @@ impl Step for Sysroot { } } -#[derive(Debug, Copy, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)] pub struct Assemble { /// The compiler which we will produce in this step. 
Assemble itself will /// take care of ensuring that the necessary prerequisites to do so exist, diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index d9ab18e7250..fe50a787f9f 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -50,7 +50,7 @@ fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool { builder.config.tools.as_ref().map_or(true, |tools| tools.contains(tool)) } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Docs { pub host: TargetSelection, } @@ -83,7 +83,7 @@ impl Step for Docs { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct JsonDocs { pub host: TargetSelection, } @@ -121,7 +121,7 @@ impl Step for JsonDocs { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustcDocs { pub host: TargetSelection, } @@ -308,7 +308,7 @@ fn make_win_dist( } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Mingw { pub host: TargetSelection, } @@ -348,7 +348,7 @@ impl Step for Mingw { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Rustc { pub compiler: Compiler, } @@ -476,7 +476,7 @@ impl Step for Rustc { let man_src = builder.src.join("src/doc/man"); let man_dst = image.join("share/man/man1"); - // don't use our `bootstrap::{copy, cp_r}`, because those try + // don't use our `bootstrap::{copy_internal, cp_r}`, because those try // to hardlink, and we don't want to edit the source templates for file_entry in builder.read_dir(&man_src) { let page_src = file_entry.path(); @@ -617,7 +617,7 @@ fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Std { pub compiler: Compiler, pub target: TargetSelection, @@ -664,7 +664,7 @@ impl Step for Std { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct RustcDev { pub compiler: Compiler, pub target: TargetSelection, @@ -723,7 +723,7 @@ impl Step for RustcDev { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Analysis { pub compiler: Compiler, pub target: TargetSelection, @@ -870,7 +870,7 @@ fn copy_src_dirs( } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Src; impl Step for Src { @@ -931,7 +931,7 @@ impl Step for Src { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct PlainSourceTarball; impl Step for PlainSourceTarball { @@ -1031,7 +1031,7 @@ impl Step for PlainSourceTarball { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Cargo { pub compiler: Compiler, pub target: TargetSelection, @@ -1080,7 +1080,7 @@ impl Step for Cargo { } } -#[derive(Debug, PartialOrd, Ord, Copy, 
Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Rls { pub compiler: Compiler, pub target: TargetSelection, @@ -1122,7 +1122,7 @@ impl Step for Rls { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzer { pub compiler: Compiler, pub target: TargetSelection, @@ -1164,7 +1164,7 @@ impl Step for RustAnalyzer { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Clippy { pub compiler: Compiler, pub target: TargetSelection, @@ -1212,7 +1212,7 @@ impl Step for Clippy { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Miri { pub compiler: Compiler, pub target: TargetSelection, @@ -1359,7 +1359,7 @@ impl Step for CodegenBackend { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Rustfmt { pub compiler: Compiler, pub target: TargetSelection, @@ -1404,7 +1404,7 @@ impl Step for Rustfmt { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct RustDemangler { pub compiler: Compiler, pub target: TargetSelection, @@ -1460,7 +1460,7 @@ impl Step for RustDemangler { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Extended { stage: u32, host: TargetSelection, diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index 7a122a8676b..a4903ce2353 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -32,7 +32,7 @@ macro_rules! submodule_helper { macro_rules! book { ($($name:ident, $path:expr, $book_name:expr $(, submodule $(= $submodule:literal)? )? ;)+) => { $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $name { target: TargetSelection, } @@ -86,7 +86,7 @@ book!( StyleGuide, "src/doc/style-guide", "style-guide"; ); -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct UnstableBook { target: TargetSelection, } @@ -160,7 +160,7 @@ impl Step for RustbookSrc

{ } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct TheBook { compiler: Compiler, target: TargetSelection, @@ -286,7 +286,7 @@ fn invoke_rustdoc( builder.run(&mut cmd); } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Standalone { compiler: Compiler, target: TargetSelection, @@ -389,7 +389,7 @@ impl Step for Standalone { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Releases { compiler: Compiler, target: TargetSelection, @@ -492,7 +492,7 @@ pub struct SharedAssetsPaths { pub version_info: PathBuf, } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct SharedAssets { target: TargetSelection, } @@ -872,7 +872,7 @@ macro_rules! tool_doc { $(is_library = $is_library:expr,)? $(crates = $crates:expr)? ) => { - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $tool { target: TargetSelection, } @@ -1021,7 +1021,7 @@ tool_doc!( crates = ["bootstrap"] ); -#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)] pub struct ErrorIndex { pub target: TargetSelection, } @@ -1056,7 +1056,7 @@ impl Step for ErrorIndex { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct UnstableBookGen { target: TargetSelection, } @@ -1112,7 +1112,7 @@ fn symlink_dir_force(config: &Config, original: &Path, link: &Path) { ); } -#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)] pub struct RustcBook { pub compiler: Compiler, pub target: TargetSelection, diff --git a/src/bootstrap/src/core/build_steps/install.rs b/src/bootstrap/src/core/build_steps/install.rs index 29238b90225..6726671ddd9 100644 --- a/src/bootstrap/src/core/build_steps/install.rs +++ b/src/bootstrap/src/core/build_steps/install.rs @@ -159,7 +159,7 @@ macro_rules! 
install { only_hosts: $only_hosts:expr, $run_item:block $(, $c:ident)*;)+) => { $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -303,7 +303,7 @@ install!((self, builder, _config), }; ); -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Src { pub stage: u32, } diff --git a/src/bootstrap/src/core/build_steps/llvm.rs b/src/bootstrap/src/core/build_steps/llvm.rs index 9622321a74e..4a92acfa3d9 100644 --- a/src/bootstrap/src/core/build_steps/llvm.rs +++ b/src/bootstrap/src/core/build_steps/llvm.rs @@ -242,7 +242,7 @@ pub(crate) fn is_ci_llvm_modified(config: &Config) -> bool { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Llvm { pub target: TargetSelection, } @@ -815,7 +815,7 @@ fn get_var(var_base: &str, host: &str, target: &str) -> Option { .or_else(|| env::var_os(var_base)) } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Lld { pub target: TargetSelection, } @@ -937,7 +937,7 @@ impl Step for Lld { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Sanitizers { pub target: TargetSelection, } @@ -1147,7 +1147,7 @@ impl HashStamp { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrtBeginEnd { pub target: TargetSelection, } @@ -1215,7 +1215,7 @@ impl Step for CrtBeginEnd { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Libunwind { pub target: TargetSelection, } diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index 27b0c7760f0..61ee2fc1f6f 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -10,7 +10,7 @@ use crate::core::config::TargetSelection; use crate::utils::helpers::output; use crate::Mode; -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ExpandYamlAnchors; impl Step for ExpandYamlAnchors { @@ -36,7 +36,7 @@ impl Step for ExpandYamlAnchors { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct BuildManifest; impl Step for BuildManifest { @@ -75,7 +75,7 @@ impl Step for BuildManifest { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct BumpStage0; impl Step for BumpStage0 { @@ -97,7 +97,7 @@ impl Step for BumpStage0 { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct ReplaceVersionPlaceholder; impl Step for ReplaceVersionPlaceholder { @@ -119,7 +119,7 @@ impl Step for ReplaceVersionPlaceholder { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Miri { stage: u32, host: TargetSelection, @@ -178,7 +178,7 @@ impl Step for Miri { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct CollectLicenseMetadata; impl Step for CollectLicenseMetadata { @@ -210,7 +210,7 @@ impl Step for 
CollectLicenseMetadata { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct GenerateCopyright; impl Step for GenerateCopyright { @@ -240,7 +240,7 @@ impl Step for GenerateCopyright { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct GenerateWindowsSys; impl Step for GenerateWindowsSys { @@ -262,7 +262,7 @@ impl Step for GenerateWindowsSys { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct GenerateCompletions; macro_rules! generate_completions { diff --git a/src/bootstrap/src/core/build_steps/setup.rs b/src/bootstrap/src/core/build_steps/setup.rs index 74a5578b43e..7bc68b5aec1 100644 --- a/src/bootstrap/src/core/build_steps/setup.rs +++ b/src/bootstrap/src/core/build_steps/setup.rs @@ -233,7 +233,7 @@ fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) { } /// Creates a toolchain link for stage1 using `rustup` -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct Link; impl Step for Link { type Output = (); @@ -444,7 +444,7 @@ fn prompt_user(prompt: &str) -> io::Result> { } /// Installs `src/etc/pre-push.sh` as a Git hook -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct Hook; impl Step for Hook { @@ -516,7 +516,7 @@ undesirable, simply delete the `pre-push` file from .git/hooks." } /// Sets up or displays `src/etc/rust_analyzer_settings.json` -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct Vscode; impl Step for Vscode { diff --git a/src/bootstrap/src/core/build_steps/synthetic_targets.rs b/src/bootstrap/src/core/build_steps/synthetic_targets.rs index a00835402ec..89d50b5ffff 100644 --- a/src/bootstrap/src/core/build_steps/synthetic_targets.rs +++ b/src/bootstrap/src/core/build_steps/synthetic_targets.rs @@ -12,7 +12,7 @@ use crate::core::config::TargetSelection; use crate::Compiler; use std::process::{Command, Stdio}; -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) struct MirOptPanicAbortSyntheticTarget { pub(crate) compiler: Compiler, pub(crate) base: TargetSelection, diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 791f847a866..a2c6e79d5e2 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -86,7 +86,7 @@ impl Step for CrateBootstrap { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Linkcheck { host: TargetSelection, } @@ -179,7 +179,7 @@ fn check_if_tidy_is_installed() -> bool { .map_or(false, |status| status.success()) } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HtmlCheck { target: TargetSelection, } @@ -220,7 +220,7 @@ impl Step for HtmlCheck { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Cargotest { stage: u32, host: TargetSelection, @@ -266,7 +266,7 @@ impl Step for Cargotest { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Cargo { stage: u32, host: TargetSelection, @@ -327,7 +327,7 @@ impl Step for Cargo { 
} } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustAnalyzer { stage: u32, host: TargetSelection, @@ -386,7 +386,7 @@ impl Step for RustAnalyzer { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Rustfmt { stage: u32, host: TargetSelection, @@ -433,7 +433,7 @@ impl Step for Rustfmt { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustDemangler { stage: u32, host: TargetSelection, @@ -492,7 +492,7 @@ impl Step for RustDemangler { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Miri { stage: u32, host: TargetSelection, @@ -699,7 +699,7 @@ impl Step for Miri { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CompiletestTest { host: TargetSelection, } @@ -747,7 +747,7 @@ impl Step for CompiletestTest { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Clippy { stage: u32, host: TargetSelection, @@ -815,7 +815,7 @@ fn path_for_cargo(builder: &Builder<'_>, compiler: Compiler) -> OsString { env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocTheme { pub compiler: Compiler, } @@ -852,7 +852,7 @@ impl Step for RustdocTheme { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocJSStd { pub target: TargetSelection, } @@ -912,7 +912,7 @@ impl Step for RustdocJSStd { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocJSNotStd { pub target: TargetSelection, pub compiler: Compiler, @@ -966,7 +966,7 @@ fn get_browser_ui_test_version(npm: &Path) -> Option { .or_else(|| get_browser_ui_test_version_inner(npm, true)) } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocGUI { pub target: TargetSelection, pub compiler: Compiler, @@ -1060,7 +1060,7 @@ impl Step for RustdocGUI { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Tidy; impl Step for Tidy { @@ -1151,7 +1151,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ExpandYamlAnchors; impl Step for ExpandYamlAnchors { @@ -1251,7 +1251,7 @@ macro_rules! test_definitions { host: $host:expr, compare_mode: $compare_mode:expr }) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -1294,7 +1294,7 @@ macro_rules! coverage_test_alias { default: $default:expr, only_hosts: $only_hosts:expr $(,)? }) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -1376,7 +1376,7 @@ default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assem /// /// Each individual mode also has its own alias that will run the tests in /// just that mode. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Coverage { pub compiler: Compiler, pub target: TargetSelection, @@ -1438,7 +1438,7 @@ host_test!(CoverageRunRustdoc { }); // For the mir-opt suite we do not use macros, as we need custom behavior when blessing. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MirOpt { pub compiler: Compiler, pub target: TargetSelection, @@ -1494,7 +1494,7 @@ impl Step for MirOpt { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] struct Compiletest { compiler: Compiler, target: TargetSelection, @@ -2142,7 +2142,7 @@ impl BookTest { macro_rules! test_book { ($($name:ident, $path:expr, $book_name:expr, default=$default:expr;)+) => { $( - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { compiler: Compiler, } @@ -2187,7 +2187,7 @@ test_book!( EditionGuide, "src/doc/edition-guide", "edition-guide", default=false; ); -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ErrorIndex { compiler: Compiler, } @@ -2264,7 +2264,7 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) -> } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustcGuide; impl Step for RustcGuide { @@ -2537,7 +2537,7 @@ impl Step for Crate { } /// Rustdoc is special in various ways, which is why this step is different from `Crate`. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateRustdoc { host: TargetSelection, } @@ -2638,7 +2638,7 @@ impl Step for CrateRustdoc { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateRustdocJsonTypes { host: TargetSelection, } @@ -2708,7 +2708,7 @@ impl Step for CrateRustdocJsonTypes { /// QEMU we have to build our own tools so we've got conditional dependencies /// on those programs as well. Note that the remote test client is built for /// the build target (us) and the server is built for the target. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RemoteCopyLibs { compiler: Compiler, target: TargetSelection, @@ -2754,7 +2754,7 @@ impl Step for RemoteCopyLibs { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Distcheck; impl Step for Distcheck { @@ -2824,7 +2824,7 @@ impl Step for Distcheck { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Bootstrap; impl Step for Bootstrap { @@ -2876,7 +2876,7 @@ impl Step for Bootstrap { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TierCheck { pub compiler: Compiler, } @@ -2926,7 +2926,7 @@ impl Step for TierCheck { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LintDocs { pub compiler: Compiler, pub target: TargetSelection, @@ -2959,7 +2959,7 @@ impl Step for LintDocs { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustInstaller; impl Step for RustInstaller { @@ -3020,7 +3020,7 @@ impl Step for RustInstaller { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TestHelpers { pub target: TargetSelection, } diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index ba867a04ec5..889876f461d 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -15,7 +15,7 @@ use crate::Compiler; use crate::Mode; use crate::{gha, Kind}; -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum SourceType { InTree, Submodule, @@ -220,7 +220,7 @@ macro_rules! bootstrap_tool { $(,allow_features = $allow_features:expr)? ; )+) => { - #[derive(Copy, PartialEq, Eq, Clone)] + #[derive(PartialEq, Eq, Clone)] pub enum Tool { $( $name, @@ -241,7 +241,7 @@ macro_rules! bootstrap_tool { } $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -315,7 +315,7 @@ bootstrap_tool!( CoverageDump, "src/tools/coverage-dump", "coverage-dump"; ); -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] +#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] pub struct ErrorIndex { pub compiler: Compiler, } @@ -369,7 +369,7 @@ impl Step for ErrorIndex { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RemoteTestServer { pub compiler: Compiler, pub target: TargetSelection, @@ -403,7 +403,7 @@ impl Step for RemoteTestServer { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] +#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] pub struct Rustdoc { /// This should only ever be 0 or 2. /// We sometimes want to reference the "bootstrap" rustdoc, which is why this option is here. 
@@ -515,7 +515,7 @@ impl Step for Rustdoc { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Cargo { pub compiler: Compiler, pub target: TargetSelection, @@ -560,7 +560,7 @@ impl Step for Cargo { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct LldWrapper { pub compiler: Compiler, pub target: TargetSelection, @@ -589,7 +589,7 @@ impl Step for LldWrapper { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzer { pub compiler: Compiler, pub target: TargetSelection, @@ -637,7 +637,7 @@ impl Step for RustAnalyzer { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzerProcMacroSrv { pub compiler: Compiler, pub target: TargetSelection, diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 0ec5e16de1d..e169cba3c13 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -1069,7 +1069,7 @@ impl<'a> Builder<'a> { /// Returns the libdir where the standard library and other artifacts are /// found for a compiler's sysroot. pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> Interned { - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] struct Libdir { compiler: Compiler, target: TargetSelection, From 24aa34858645e96316bb61f4a095d13486a56892 Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Sat, 24 Feb 2024 15:38:55 -0700 Subject: [PATCH 83/92] Add test cases for inlining compiler-private items Closes #106421 This was already fixed by f5d43a052b9eb464e54af819143467954d814a24, but now the test cases are added. --- tests/rustdoc/auxiliary/issue-106421-force-unstable.rs | 9 +++++++++ tests/rustdoc/issue-106421-not-internal.rs | 8 ++++++++ tests/rustdoc/issue-106421.rs | 8 ++++++++ 3 files changed, 25 insertions(+) create mode 100644 tests/rustdoc/auxiliary/issue-106421-force-unstable.rs create mode 100644 tests/rustdoc/issue-106421-not-internal.rs create mode 100644 tests/rustdoc/issue-106421.rs diff --git a/tests/rustdoc/auxiliary/issue-106421-force-unstable.rs b/tests/rustdoc/auxiliary/issue-106421-force-unstable.rs new file mode 100644 index 00000000000..589d44c1f77 --- /dev/null +++ b/tests/rustdoc/auxiliary/issue-106421-force-unstable.rs @@ -0,0 +1,9 @@ +//@ compile-flags: -Zforce-unstable-if-unmarked +#![crate_name="foo"] +pub struct FatalError; + +impl FatalError { + pub fn raise(self) -> ! { + loop {} + } +} diff --git a/tests/rustdoc/issue-106421-not-internal.rs b/tests/rustdoc/issue-106421-not-internal.rs new file mode 100644 index 00000000000..1d744fba53f --- /dev/null +++ b/tests/rustdoc/issue-106421-not-internal.rs @@ -0,0 +1,8 @@ +//@ aux-build:issue-106421-force-unstable.rs +//@ ignore-cross-compile +// This is the version where a non-compiler-internal crate inlines a compiler-internal one. +// In this case, the item shouldn't be documented, because regular users can't get at it. 
+extern crate foo; + +// @!has issue_106421_not_internal/struct.FatalError.html '//*[@id="method.raise"]' 'fn raise' +pub use foo::FatalError; diff --git a/tests/rustdoc/issue-106421.rs b/tests/rustdoc/issue-106421.rs new file mode 100644 index 00000000000..d4141a4ab0c --- /dev/null +++ b/tests/rustdoc/issue-106421.rs @@ -0,0 +1,8 @@ +//@ aux-build:issue-106421-force-unstable.rs +//@ ignore-cross-compile +//@ compile-flags: -Zforce-unstable-if-unmarked + +extern crate foo; + +// @has issue_106421/struct.FatalError.html '//*[@id="method.raise"]' 'fn raise' +pub use foo::FatalError; From 9caeabe2e2e9dab5356b06d2b1a46ae11cc59ea1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 25 Feb 2024 09:56:19 +0200 Subject: [PATCH 84/92] Add missing imports --- .../crates/hir-ty/src/mir/lower/pattern_matching.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index a6d5ce723e3..85c8d1685b8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,6 +1,6 @@ //! MIR lowering for patterns -use hir_def::AssocItemId; +use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId}; use crate::{ mir::lower::{ From 5ee6a5d704fe9259e9719dd5a31fa3f69b68489c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 25 Feb 2024 09:58:11 +0200 Subject: [PATCH 85/92] Avoid using cfg(FALSE) --- src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 40a195f7d95..e678a2fee13 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -742,9 +742,8 @@ pub(crate) fn adt_datum_query( phantom_data, }; - #[cfg(FALSE)] // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it - let variant_id_to_fields = |id: VariantId| { + let _variant_id_to_fields = |id: VariantId| { let variant_data = &id.variant_data(db.upcast()); let fields = if variant_data.fields().is_empty() { vec![] From 5b7786cd1d246fd0bbe748c8690fb99b1134d2be Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 8 Feb 2024 20:08:19 +0100 Subject: [PATCH 86/92] make non-PartialEq-typed consts as patterns a hard error --- compiler/rustc_lint/src/lib.rs | 5 ++ compiler/rustc_lint_defs/src/builtin.rs | 52 ------------------- compiler/rustc_mir_build/src/errors.rs | 14 ++--- .../src/thir/pattern/const_to_pat.rs | 15 +++--- .../ui/consts/const_in_pattern/issue-65466.rs | 7 +-- .../const_in_pattern/issue-65466.stderr | 21 ++------ .../match/issue-72896-non-partial-eq-const.rs | 4 +- .../issue-72896-non-partial-eq-const.stderr | 21 ++------ 8 files changed, 27 insertions(+), 112 deletions(-) diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index d8e12c04f75..844f87c3f50 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -527,6 +527,11 @@ fn register_builtins(store: &mut LintStore) { "no longer needed, see #93367 \ for more information", ); + store.register_removed( + "const_patterns_without_partial_eq", + "converted into hard error, see RFC #3535 \ + for more information", + ); } fn 
register_internals(store: &mut LintStore) { diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 84a050a242a..1cddb45428c 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -32,7 +32,6 @@ declare_lint_pass! { CONFLICTING_REPR_HINTS, CONST_EVALUATABLE_UNCHECKED, CONST_ITEM_MUTATION, - CONST_PATTERNS_WITHOUT_PARTIAL_EQ, DEAD_CODE, DEPRECATED, DEPRECATED_CFG_ATTR_CRATE_TYPE_NAME, @@ -2342,57 +2341,6 @@ declare_lint! { }; } -declare_lint! { - /// The `const_patterns_without_partial_eq` lint detects constants that are used in patterns, - /// whose type does not implement `PartialEq`. - /// - /// ### Example - /// - /// ```rust,compile_fail - /// #![deny(const_patterns_without_partial_eq)] - /// - /// trait EnumSetType { - /// type Repr; - /// } - /// - /// enum Enum8 { } - /// impl EnumSetType for Enum8 { - /// type Repr = u8; - /// } - /// - /// #[derive(PartialEq, Eq)] - /// struct EnumSet { - /// __enumset_underlying: T::Repr, - /// } - /// - /// const CONST_SET: EnumSet = EnumSet { __enumset_underlying: 3 }; - /// - /// fn main() { - /// match CONST_SET { - /// CONST_SET => { /* ok */ } - /// _ => panic!("match fell through?"), - /// } - /// } - /// ``` - /// - /// {{produces}} - /// - /// ### Explanation - /// - /// Previous versions of Rust accepted constants in patterns, even if those constants' types - /// did not have `PartialEq` implemented. The compiler falls back to comparing the value - /// field-by-field. In the future we'd like to ensure that pattern matching always - /// follows `PartialEq` semantics, so that trait bound will become a requirement for - /// matching on constants. - pub CONST_PATTERNS_WITHOUT_PARTIAL_EQ, - Warn, - "constant in pattern does not implement `PartialEq`", - @future_incompatible = FutureIncompatibleInfo { - reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps, - reference: "issue #116122 ", - }; -} - declare_lint! { /// The `ambiguous_associated_items` lint detects ambiguity between /// [associated items] and [enum variants]. 
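For context, a compiling sketch of the pattern this change affects, adapted from the lint documentation removed above (the generic parameters, dropped from the doc text, are restored here as `EnumSet<T: EnumSetType>` and `EnumSet<Enum8>`, matching the companion test `issue-72896-non-partial-eq-const.rs` in this patch). After this patch the first match arm is a hard error rather than a `const_patterns_without_partial_eq` future-compatibility warning.

```rust
trait EnumSetType {
    type Repr;
}

enum Enum8 {}
impl EnumSetType for Enum8 {
    type Repr = u8;
}

// The derived impl is bounded by `T: PartialEq`, which `Enum8` does not
// satisfy, so `EnumSet<Enum8>` itself does not implement `PartialEq`.
#[derive(PartialEq, Eq)]
struct EnumSet<T: EnumSetType> {
    __enumset_underlying: T::Repr,
}

const CONST_SET: EnumSet<Enum8> = EnumSet { __enumset_underlying: 3 };

fn main() {
    match CONST_SET {
        // error: to use a constant of type `EnumSet<Enum8>` in a pattern,
        // the type must implement `PartialEq`
        CONST_SET => {}
        _ => panic!("match fell through?"),
    }
}
```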
diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 48b93ce0ac5..101f1cb9f2f 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -767,6 +767,14 @@ pub struct TypeNotStructural<'tcx> { pub non_sm_ty: Ty<'tcx>, } +#[derive(Diagnostic)] +#[diag(mir_build_non_partial_eq_match)] +pub struct TypeNotPartialEq<'tcx> { + #[primary_span] + pub span: Span, + pub non_peq_ty: Ty<'tcx>, +} + #[derive(Diagnostic)] #[diag(mir_build_invalid_pattern)] pub struct InvalidPattern<'tcx> { @@ -822,12 +830,6 @@ pub struct NontrivialStructuralMatch<'tcx> { pub non_sm_ty: Ty<'tcx>, } -#[derive(LintDiagnostic)] -#[diag(mir_build_non_partial_eq_match)] -pub struct NonPartialEqMatch<'tcx> { - pub non_peq_ty: Ty<'tcx>, -} - #[derive(Diagnostic)] #[diag(mir_build_pattern_not_covered, code = E0005)] pub(crate) struct PatternNotCovered<'s, 'tcx> { diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index c77c80d9f4b..09727f9b71b 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -16,7 +16,7 @@ use std::cell::Cell; use super::PatCtxt; use crate::errors::{ - IndirectStructuralMatch, InvalidPattern, NaNPattern, NonPartialEqMatch, PointerPattern, + IndirectStructuralMatch, InvalidPattern, NaNPattern, PointerPattern, TypeNotPartialEq, TypeNotStructural, UnionPattern, UnsizedPattern, }; @@ -208,15 +208,12 @@ impl<'tcx> ConstToPat<'tcx> { ); } - // Always check for `PartialEq`, even if we emitted other lints. (But not if there were - // any errors.) This ensures it shows up in cargo's future-compat reports as well. + // Always check for `PartialEq` if we had no other errors yet. if !self.type_has_partial_eq_impl(cv.ty()) { - self.tcx().emit_node_span_lint( - lint::builtin::CONST_PATTERNS_WITHOUT_PARTIAL_EQ, - self.id, - self.span, - NonPartialEqMatch { non_peq_ty: cv.ty() }, - ); + let err = TypeNotPartialEq { span: self.span, non_peq_ty: cv.ty() }; + let e = self.tcx().dcx().emit_err(err); + let kind = PatKind::Error(e); + return Box::new(Pat { span: self.span, ty: cv.ty(), kind }); } } diff --git a/tests/ui/consts/const_in_pattern/issue-65466.rs b/tests/ui/consts/const_in_pattern/issue-65466.rs index 048fca762d5..62efce64876 100644 --- a/tests/ui/consts/const_in_pattern/issue-65466.rs +++ b/tests/ui/consts/const_in_pattern/issue-65466.rs @@ -1,7 +1,3 @@ -#![deny(indirect_structural_match)] - -//@ check-pass - #[derive(PartialEq, Eq)] enum O { Some(*const T), // Can also use PhantomData @@ -15,8 +11,7 @@ const C: &[O] = &[O::None]; fn main() { let x = O::None; match &[x][..] 
{ - C => (), //~WARN: the type must implement `PartialEq` - //~| previously accepted + C => (), //~ERROR: the type must implement `PartialEq` _ => (), } } diff --git a/tests/ui/consts/const_in_pattern/issue-65466.stderr b/tests/ui/consts/const_in_pattern/issue-65466.stderr index 9c80cb3a849..7d5e5b5b0c6 100644 --- a/tests/ui/consts/const_in_pattern/issue-65466.stderr +++ b/tests/ui/consts/const_in_pattern/issue-65466.stderr @@ -1,23 +1,8 @@ -warning: to use a constant of type `&[O]` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-65466.rs:18:9 +error: to use a constant of type `&[O]` in a pattern, the type must implement `PartialEq` + --> $DIR/issue-65466.rs:14:9 | LL | C => (), | ^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default -warning: 1 warning emitted - -Future incompatibility report: Future breakage diagnostic: -warning: to use a constant of type `&[O]` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-65466.rs:18:9 - | -LL | C => (), - | ^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default +error: aborting due to 1 previous error diff --git a/tests/ui/match/issue-72896-non-partial-eq-const.rs b/tests/ui/match/issue-72896-non-partial-eq-const.rs index d4972714608..f15eae83896 100644 --- a/tests/ui/match/issue-72896-non-partial-eq-const.rs +++ b/tests/ui/match/issue-72896-non-partial-eq-const.rs @@ -1,4 +1,3 @@ -//@ run-pass trait EnumSetType { type Repr; } @@ -17,8 +16,7 @@ const CONST_SET: EnumSet = EnumSet { __enumset_underlying: 3 }; fn main() { match CONST_SET { - CONST_SET => { /* ok */ } //~WARN: must implement `PartialEq` - //~| previously accepted + CONST_SET => { /* ok */ } //~ERROR: must implement `PartialEq` _ => panic!("match fell through?"), } } diff --git a/tests/ui/match/issue-72896-non-partial-eq-const.stderr b/tests/ui/match/issue-72896-non-partial-eq-const.stderr index a7fc0cfc054..4155586c160 100644 --- a/tests/ui/match/issue-72896-non-partial-eq-const.stderr +++ b/tests/ui/match/issue-72896-non-partial-eq-const.stderr @@ -1,23 +1,8 @@ -warning: to use a constant of type `EnumSet` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-72896-non-partial-eq-const.rs:20:9 +error: to use a constant of type `EnumSet` in a pattern, the type must implement `PartialEq` + --> $DIR/issue-72896-non-partial-eq-const.rs:19:9 | LL | CONST_SET => { /* ok */ } | ^^^^^^^^^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default -warning: 1 warning emitted - -Future incompatibility report: Future breakage diagnostic: -warning: to use a constant of type `EnumSet` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-72896-non-partial-eq-const.rs:20:9 - | -LL | CONST_SET => { /* ok */ } - | ^^^^^^^^^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
- = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default +error: aborting due to 1 previous error From b2fbb8a05392be976c67e3b0063203d5b049da5c Mon Sep 17 00:00:00 2001 From: Markus Reiter Date: Thu, 22 Feb 2024 14:59:52 +0100 Subject: [PATCH 87/92] Use generic `NonZero` in tests. --- tests/codegen/array-equality.rs | 4 +- tests/codegen/enum/enum-debug-niche-2.rs | 17 +- tests/codegen/function-arguments.rs | 8 +- tests/codegen/intrinsics/transmute-niched.rs | 6 +- tests/codegen/issues/issue-119422.rs | 22 +-- tests/codegen/loads.rs | 8 +- tests/codegen/option-as-slice.rs | 9 +- tests/codegen/option-nonzero-eq.rs | 8 +- tests/codegen/slice-ref-equality.rs | 8 +- tests/codegen/transmute-optimized.rs | 6 +- tests/debuginfo/msvc-pretty-enums.rs | 80 ++++---- tests/debuginfo/numeric-types.rs | 26 +-- .../instsimplify/combine_transmutes.rs | 4 +- tests/rustdoc/type-layout.rs | 2 +- tests/ui/abi/compatibility.rs | 9 +- .../consts/const-eval/raw-bytes.32bit.stderr | 114 +++++------ .../consts/const-eval/raw-bytes.64bit.stderr | 114 +++++------ tests/ui/consts/const-eval/raw-bytes.rs | 9 +- tests/ui/consts/const-eval/ub-nonnull.rs | 10 +- tests/ui/consts/const-eval/ub-nonnull.stderr | 14 +- tests/ui/consts/const-eval/valid-const.rs | 9 +- tests/ui/consts/ice-48279.rs | 2 +- tests/ui/consts/tuple-struct-constructors.rs | 7 +- .../intrinsics/panic-uninitialized-zeroed.rs | 12 +- tests/ui/issues/issue-64593.rs | 3 +- tests/ui/layout/unsafe-cell-hides-niche.rs | 28 +-- .../ui/layout/zero-sized-array-enum-niche.rs | 5 +- .../layout/zero-sized-array-enum-niche.stderr | 10 +- tests/ui/lint/clashing-extern-fn.rs | 33 ++-- tests/ui/lint/clashing-extern-fn.stderr | 50 ++--- tests/ui/lint/invalid_value.rs | 13 +- tests/ui/lint/invalid_value.stderr | 186 +++++++++--------- tests/ui/lint/lint-ctypes-enum.rs | 41 ++-- tests/ui/lint/lint-ctypes-enum.stderr | 44 ++--- .../overflowing-neg-nonzero.rs | 6 +- tests/ui/print_type_sizes/niche-filling.rs | 44 ++--- .../ui/structs-enums/enum-null-pointer-opt.rs | 5 +- .../enum-null-pointer-opt.stderr | 2 +- tests/ui/structs-enums/type-sizes.rs | 25 +-- .../core-std-import-order-issue-83564.rs | 7 +- .../core-std-import-order-issue-83564.stderr | 12 +- .../next-solver/specialization-transmute.rs | 7 +- .../specialization-transmute.stderr | 6 +- 43 files changed, 519 insertions(+), 516 deletions(-) diff --git a/tests/codegen/array-equality.rs b/tests/codegen/array-equality.rs index 94354228886..5b85da1d4a0 100644 --- a/tests/codegen/array-equality.rs +++ b/tests/codegen/array-equality.rs @@ -1,7 +1,7 @@ //@ compile-flags: -O -Z merge-functions=disabled //@ only-x86_64 - #![crate_type = "lib"] +#![feature(generic_nonzero)] // CHECK-LABEL: @array_eq_value #[no_mangle] @@ -63,7 +63,7 @@ pub fn array_eq_zero_short(x: [u16; 3]) -> bool { // CHECK-LABEL: @array_eq_none_short(i40 #[no_mangle] -pub fn array_eq_none_short(x: [Option; 5]) -> bool { +pub fn array_eq_none_short(x: [Option>; 5]) -> bool { // CHECK-NEXT: start: // CHECK-NEXT: %[[EQ:.+]] = icmp eq i40 %0, 0 // CHECK-NEXT: ret i1 %[[EQ]] diff --git a/tests/codegen/enum/enum-debug-niche-2.rs b/tests/codegen/enum/enum-debug-niche-2.rs index 4315741e0bd..25871885e7e 100644 --- a/tests/codegen/enum/enum-debug-niche-2.rs +++ b/tests/codegen/enum/enum-debug-niche-2.rs @@ -1,20 +1,17 @@ -// This tests that optimized enum debug info accurately reflects the enum layout. -// This is ignored for the fallback mode on MSVC due to problems with PDB. 
- -// -//@ ignore-msvc - +//! This tests that optimized enum debug info accurately reflects the enum layout. +//! This is ignored for the fallback mode on MSVC due to problems with PDB. +//! //@ compile-flags: -g -C no-prepopulate-passes - +//@ ignore-msvc +// // CHECK: {{.*}}DICompositeType{{.*}}tag: DW_TAG_variant_part,{{.*}}size: 32,{{.*}} // CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "Placeholder",{{.*}}extraData: i128 4294967295{{[,)].*}} // CHECK: {{.*}}DIDerivedType{{.*}}tag: DW_TAG_member,{{.*}}name: "Error",{{.*}}extraData: i128 0{{[,)].*}} - -#![feature(never_type)] +#![feature(generic_nonzero, never_type)] #[derive(Copy, Clone)] pub struct Entity { - private: std::num::NonZeroU32, + private: std::num::NonZero, } #[derive(Copy, Clone, PartialEq, Eq)] diff --git a/tests/codegen/function-arguments.rs b/tests/codegen/function-arguments.rs index 88cedcf46b6..b75c188f51a 100644 --- a/tests/codegen/function-arguments.rs +++ b/tests/codegen/function-arguments.rs @@ -1,10 +1,10 @@ //@ compile-flags: -O -C no-prepopulate-passes - #![crate_type = "lib"] #![feature(dyn_star)] +#![feature(generic_nonzero)] use std::mem::MaybeUninit; -use std::num::NonZeroU64; +use std::num::NonZero; use std::marker::PhantomPinned; use std::ptr::NonNull; @@ -70,13 +70,13 @@ pub fn int(x: u64) -> u64 { // CHECK: noundef i64 @nonzero_int(i64 noundef %x) #[no_mangle] -pub fn nonzero_int(x: NonZeroU64) -> NonZeroU64 { +pub fn nonzero_int(x: NonZero) -> NonZero { x } // CHECK: noundef i64 @option_nonzero_int(i64 noundef %x) #[no_mangle] -pub fn option_nonzero_int(x: Option) -> Option { +pub fn option_nonzero_int(x: Option>) -> Option> { x } diff --git a/tests/codegen/intrinsics/transmute-niched.rs b/tests/codegen/intrinsics/transmute-niched.rs index 7c448c82e4b..b5e0da1b2f5 100644 --- a/tests/codegen/intrinsics/transmute-niched.rs +++ b/tests/codegen/intrinsics/transmute-niched.rs @@ -2,11 +2,11 @@ //@ [OPT] compile-flags: -C opt-level=3 -C no-prepopulate-passes //@ [DBG] compile-flags: -C opt-level=0 -C no-prepopulate-passes //@ only-64bit (so I don't need to worry about usize) - #![crate_type = "lib"] +#![feature(generic_nonzero)] use std::mem::transmute; -use std::num::NonZeroU32; +use std::num::NonZero; #[repr(u8)] pub enum SmallEnum { @@ -130,7 +130,7 @@ pub unsafe fn check_enum_to_char(x: Minus100ToPlus100) -> char { // CHECK-LABEL: @check_swap_pair( #[no_mangle] -pub unsafe fn check_swap_pair(x: (char, NonZeroU32)) -> (NonZeroU32, char) { +pub unsafe fn check_swap_pair(x: (char, NonZero)) -> (NonZero, char) { // OPT: %0 = icmp ule i32 %x.0, 1114111 // OPT: call void @llvm.assume(i1 %0) // OPT: %1 = icmp uge i32 %x.0, 1 diff --git a/tests/codegen/issues/issue-119422.rs b/tests/codegen/issues/issue-119422.rs index 937fdcf28f5..19480b4dc9e 100644 --- a/tests/codegen/issues/issue-119422.rs +++ b/tests/codegen/issues/issue-119422.rs @@ -1,13 +1,13 @@ //! This test checks that compiler don't generate useless compares to zeros -//! for NonZero integer types. - +//! for `NonZero` integer types. +//! 
//@ compile-flags: -O --edition=2021 -Zmerge-functions=disabled //@ only-64bit (because the LLVM type of i64 for usize shows up) - #![crate_type = "lib"] +#![feature(generic_nonzero)] -use core::num::*; use core::ptr::NonNull; +use core::num::NonZero; // CHECK-LABEL: @check_non_null #[no_mangle] @@ -18,7 +18,7 @@ pub fn check_non_null(x: NonNull) -> bool { // CHECK-LABEL: @equals_zero_is_false_u8 #[no_mangle] -pub fn equals_zero_is_false_u8(x: NonZeroU8) -> bool { +pub fn equals_zero_is_false_u8(x: NonZero) -> bool { // CHECK-NOT: br // CHECK: ret i1 false // CHECK-NOT: br @@ -27,7 +27,7 @@ pub fn equals_zero_is_false_u8(x: NonZeroU8) -> bool { // CHECK-LABEL: @not_equals_zero_is_true_u8 #[no_mangle] -pub fn not_equals_zero_is_true_u8(x: NonZeroU8) -> bool { +pub fn not_equals_zero_is_true_u8(x: NonZero) -> bool { // CHECK-NOT: br // CHECK: ret i1 true // CHECK-NOT: br @@ -36,7 +36,7 @@ pub fn not_equals_zero_is_true_u8(x: NonZeroU8) -> bool { // CHECK-LABEL: @equals_zero_is_false_i8 #[no_mangle] -pub fn equals_zero_is_false_i8(x: NonZeroI8) -> bool { +pub fn equals_zero_is_false_i8(x: NonZero) -> bool { // CHECK-NOT: br // CHECK: ret i1 false // CHECK-NOT: br @@ -45,7 +45,7 @@ pub fn equals_zero_is_false_i8(x: NonZeroI8) -> bool { // CHECK-LABEL: @not_equals_zero_is_true_i8 #[no_mangle] -pub fn not_equals_zero_is_true_i8(x: NonZeroI8) -> bool { +pub fn not_equals_zero_is_true_i8(x: NonZero) -> bool { // CHECK-NOT: br // CHECK: ret i1 true // CHECK-NOT: br @@ -54,7 +54,7 @@ pub fn not_equals_zero_is_true_i8(x: NonZeroI8) -> bool { // CHECK-LABEL: @usize_try_from_u32 #[no_mangle] -pub fn usize_try_from_u32(x: NonZeroU32) -> NonZeroUsize { +pub fn usize_try_from_u32(x: NonZero) -> NonZero { // CHECK-NOT: br // CHECK: zext i32 %{{.*}} to i64 // CHECK-NOT: br @@ -64,7 +64,7 @@ pub fn usize_try_from_u32(x: NonZeroU32) -> NonZeroUsize { // CHECK-LABEL: @isize_try_from_i32 #[no_mangle] -pub fn isize_try_from_i32(x: NonZeroI32) -> NonZeroIsize { +pub fn isize_try_from_i32(x: NonZero) -> NonZero { // CHECK-NOT: br // CHECK: sext i32 %{{.*}} to i64 // CHECK-NOT: br @@ -74,7 +74,7 @@ pub fn isize_try_from_i32(x: NonZeroI32) -> NonZeroIsize { // CHECK-LABEL: @u64_from_nonzero_is_not_zero #[no_mangle] -pub fn u64_from_nonzero_is_not_zero(x: NonZeroU64)->bool { +pub fn u64_from_nonzero_is_not_zero(x: NonZero)->bool { // CHECK-NOT: br // CHECK: ret i1 false // CHECK-NOT: br diff --git a/tests/codegen/loads.rs b/tests/codegen/loads.rs index 0471d83c25a..b86b3dd3a19 100644 --- a/tests/codegen/loads.rs +++ b/tests/codegen/loads.rs @@ -1,9 +1,9 @@ //@ compile-flags: -C no-prepopulate-passes -Zmir-opt-level=0 - #![crate_type = "lib"] +#![feature(generic_nonzero)] use std::mem::MaybeUninit; -use std::num::NonZeroU16; +use std::num::NonZero; pub struct Bytes { a: u8, @@ -99,14 +99,14 @@ pub fn load_int(x: &u16) -> u16 { // CHECK-LABEL: @load_nonzero_int #[no_mangle] -pub fn load_nonzero_int(x: &NonZeroU16) -> NonZeroU16 { +pub fn load_nonzero_int(x: &NonZero) -> NonZero { // CHECK: load i16, ptr %x, align 2, !range ![[NONZEROU16_RANGE:[0-9]+]], !noundef !{{[0-9]+}} *x } // CHECK-LABEL: @load_option_nonzero_int #[no_mangle] -pub fn load_option_nonzero_int(x: &Option) -> Option { +pub fn load_option_nonzero_int(x: &Option>) -> Option> { // CHECK: load i16, ptr %x, align 2, !noundef ![[NOUNDEF]]{{$}} *x } diff --git a/tests/codegen/option-as-slice.rs b/tests/codegen/option-as-slice.rs index 990ec1d1f66..14a39243607 100644 --- a/tests/codegen/option-as-slice.rs +++ b/tests/codegen/option-as-slice.rs @@ -1,14 +1,13 
@@ //@ compile-flags: -O -Z randomize-layout=no //@ only-x86_64 //@ ignore-llvm-version: 16.0.0 -// ^ needs https://reviews.llvm.org/D146149 in 16.0.1 - +// ^-- needs https://reviews.llvm.org/D146149 in 16.0.1 #![crate_type = "lib"] -#![feature(option_as_slice)] +#![feature(generic_nonzero)] extern crate core; -use core::num::NonZeroU64; +use core::num::NonZero; use core::option::Option; // CHECK-LABEL: @u64_opt_as_slice @@ -23,7 +22,7 @@ pub fn u64_opt_as_slice(o: &Option) -> &[u64] { // CHECK-LABEL: @nonzero_u64_opt_as_slice #[no_mangle] -pub fn nonzero_u64_opt_as_slice(o: &Option) -> &[NonZeroU64] { +pub fn nonzero_u64_opt_as_slice(o: &Option>) -> &[NonZero] { // CHECK-NOT: select // CHECK-NOT: br // CHECK-NOT: switch diff --git a/tests/codegen/option-nonzero-eq.rs b/tests/codegen/option-nonzero-eq.rs index f6be90a5dde..f637b1aef97 100644 --- a/tests/codegen/option-nonzero-eq.rs +++ b/tests/codegen/option-nonzero-eq.rs @@ -1,18 +1,18 @@ //@ compile-flags: -O -Zmerge-functions=disabled - #![crate_type = "lib"] +#![feature(generic_nonzero)] extern crate core; use core::cmp::Ordering; -use core::num::{NonZeroU32, NonZeroI64}; use core::ptr::NonNull; +use core::num::NonZero; // See also tests/assembly/option-nonzero-eq.rs, for cases with `assume`s in the // LLVM and thus don't optimize down clearly here, but do in assembly. // CHECK-lABEL: @non_zero_eq #[no_mangle] -pub fn non_zero_eq(l: Option, r: Option) -> bool { +pub fn non_zero_eq(l: Option>, r: Option>) -> bool { // CHECK: start: // CHECK-NEXT: icmp eq i32 // CHECK-NEXT: ret i1 @@ -21,7 +21,7 @@ pub fn non_zero_eq(l: Option, r: Option) -> bool { // CHECK-lABEL: @non_zero_signed_eq #[no_mangle] -pub fn non_zero_signed_eq(l: Option, r: Option) -> bool { +pub fn non_zero_signed_eq(l: Option>, r: Option>) -> bool { // CHECK: start: // CHECK-NEXT: icmp eq i64 // CHECK-NEXT: ret i1 diff --git a/tests/codegen/slice-ref-equality.rs b/tests/codegen/slice-ref-equality.rs index 371e685ec6c..85d9c34a30b 100644 --- a/tests/codegen/slice-ref-equality.rs +++ b/tests/codegen/slice-ref-equality.rs @@ -1,8 +1,8 @@ //@ compile-flags: -O -Zmerge-functions=disabled - #![crate_type = "lib"] +#![feature(generic_nonzero)] -use std::num::{NonZeroI16, NonZeroU32}; +use std::num::NonZero; // #71602 reported a simple array comparison just generating a loop. 
// This was originally fixed by ensuring it generates a single bcmp, @@ -70,7 +70,7 @@ fn eq_slice_of_i32(x: &[i32], y: &[i32]) -> bool { // CHECK-SAME: [[USIZE:i16|i32|i64]] noundef %1 // CHECK-SAME: [[USIZE]] noundef %3 #[no_mangle] -fn eq_slice_of_nonzero(x: &[NonZeroU32], y: &[NonZeroU32]) -> bool { +fn eq_slice_of_nonzero(x: &[NonZero], y: &[NonZero]) -> bool { // CHECK: icmp eq [[USIZE]] %1, %3 // CHECK: %[[BYTES:.+]] = shl nsw [[USIZE]] %1, 2 // CHECK: tail call{{( noundef)?}} i32 @{{bcmp|memcmp}}(ptr @@ -82,7 +82,7 @@ fn eq_slice_of_nonzero(x: &[NonZeroU32], y: &[NonZeroU32]) -> bool { // CHECK-SAME: [[USIZE:i16|i32|i64]] noundef %1 // CHECK-SAME: [[USIZE]] noundef %3 #[no_mangle] -fn eq_slice_of_option_of_nonzero(x: &[Option], y: &[Option]) -> bool { +fn eq_slice_of_option_of_nonzero(x: &[Option>], y: &[Option>]) -> bool { // CHECK: icmp eq [[USIZE]] %1, %3 // CHECK: %[[BYTES:.+]] = shl nsw [[USIZE]] %1, 1 // CHECK: tail call{{( noundef)?}} i32 @{{bcmp|memcmp}}(ptr diff --git a/tests/codegen/transmute-optimized.rs b/tests/codegen/transmute-optimized.rs index 9217def76b5..1a5f53e625a 100644 --- a/tests/codegen/transmute-optimized.rs +++ b/tests/codegen/transmute-optimized.rs @@ -1,6 +1,6 @@ //@ compile-flags: -O -Z merge-functions=disabled - #![crate_type = "lib"] +#![feature(generic_nonzero)] // This tests that LLVM can optimize based on the niches in the source or // destination types for transmutes. @@ -33,7 +33,7 @@ pub fn non_null_is_null(x: std::ptr::NonNull) -> bool { // CHECK-LABEL: i1 @non_zero_is_null( #[no_mangle] -pub fn non_zero_is_null(x: std::num::NonZeroUsize) -> bool { +pub fn non_zero_is_null(x: std::num::NonZero) -> bool { // CHECK: ret i1 false let p: *const i32 = unsafe { std::mem::transmute(x) }; p.is_null() @@ -72,7 +72,7 @@ pub fn normal_div(a: u32, b: u32) -> u32 { // CHECK-LABEL: i32 @div_transmute_nonzero(i32 #[no_mangle] -pub fn div_transmute_nonzero(a: u32, b: std::num::NonZeroI32) -> u32 { +pub fn div_transmute_nonzero(a: u32, b: std::num::NonZero) -> u32 { // CHECK-NOT: call core::panicking::panic // CHECK: %[[R:.+]] = udiv i32 %a, %b // CHECK-NEXT: ret i32 %[[R]] diff --git a/tests/debuginfo/msvc-pretty-enums.rs b/tests/debuginfo/msvc-pretty-enums.rs index c6dd9f7939d..cfac14a22c4 100644 --- a/tests/debuginfo/msvc-pretty-enums.rs +++ b/tests/debuginfo/msvc-pretty-enums.rs @@ -1,143 +1,143 @@ //@ only-cdb //@ compile-flags:-g - +// // cdb-command: g - +// // cdb-command: dx a // cdb-check:a : Some [Type: enum2$ >] // cdb-check: [+0x000] __0 : Low (0x2) [Type: msvc_pretty_enums::CStyleEnum] - +// // cdb-command: dx b // cdb-check:b : None [Type: enum2$ >] - +// // cdb-command: dx c // cdb-check:c : Tag1 [Type: enum2$] - +// // cdb-command: dx d // cdb-check:d : Data [Type: enum2$] // cdb-check: [+0x000] my_data : High (0x10) [Type: msvc_pretty_enums::CStyleEnum] - +// // cdb-command: dx e // cdb-check:e : Tag2 [Type: enum2$] - +// // cdb-command: dx f // cdb-check:f : Some [Type: enum2$ > >] // cdb-check: [+0x000] __0 : 0x[...] : 0x1 [Type: unsigned int *] - +// // cdb-command: dx g // cdb-check:g : None [Type: enum2$ > >] - +// // cdb-command: dx h // cdb-check:h : Some [Type: enum2$ >] // cdb-check: [+0x004] __0 : 0xc [Type: unsigned int] - +// // cdb-command: dx i // cdb-check:i : None [Type: enum2$ >] - +// // cdb-command: dx j // cdb-check:j : High (0x10) [Type: msvc_pretty_enums::CStyleEnum] - +// // cdb-command: dx k // cdb-check:k : Some [Type: enum2$ >] // cdb-check: [+0x000] __0 : "IAMA optional string!" 
[Type: alloc::string::String] - +// // cdb-command: dx l // cdb-check:l : Ok [Type: enum2$ > >] // cdb-check: [+0x000] __0 : 0x2a [Type: unsigned int] - +// // cdb-command: dx niche128_some // cdb-check: niche128_some : Some [Type: enum2$ > >] // Note: we can't actually read the value of the field because CDB cannot handle 128 bit integers. // cdb-check: [+0x000] __0 [...] [Type: core::num::nonzero::NonZero] - +// // cdb-command: dx niche128_none // cdb-check: niche128_none : None [Type: enum2$ > >] - +// // cdb-command: dx wrapping_niche128_untagged // cdb-check: wrapping_niche128_untagged : X [Type: enum2$] // cdb-check: [+0x[...]] __0 [Type: msvc_pretty_enums::Wrapping128] - +// // cdb-command: dx wrapping_niche128_none1 // cdb-check: wrapping_niche128_none1 : Y [Type: enum2$] // cdb-check: [+0x[...]] __0 [Type: msvc_pretty_enums::Wrapping128] - +// // cdb-command: dx wrapping_niche128_none2 // cdb-check: wrapping_niche128_none2 : Z [Type: enum2$] // cdb-check: [+0x[...]] __0 [Type: msvc_pretty_enums::Wrapping128] - +// // cdb-command: dx direct_tag_128_a,d // cdb-check: direct_tag_128_a,d : A [Type: enum2$] // cdb-check: [+0x[...]] __0 : 42 [Type: unsigned int] - +// // cdb-command: dx direct_tag_128_b,d // cdb-check: direct_tag_128_b,d : B [Type: enum2$] // cdb-check: [+0x[...]] __0 : 137 [Type: unsigned int] - +// // cdb-command: dx niche_w_fields_1_some,d // cdb-check: niche_w_fields_1_some,d : A [Type: enum2$] // cdb-check: [+0x[...]] __0 : 0x[...] : 77 [Type: unsigned char *] // cdb-check: [+0x[...]] __1 : 7 [Type: unsigned int] - +// // cdb-command: dx niche_w_fields_1_none,d // cdb-check: niche_w_fields_1_none,d : B [Type: enum2$] // cdb-check: [+0x[...]] __0 : 99 [Type: unsigned int] - +// // cdb-command: dx niche_w_fields_2_some,d // cdb-check: niche_w_fields_2_some,d : A [Type: enum2$] // cdb-check: [+0x[...]] __0 : 800 [Type: core::num::nonzero::NonZero] // cdb-check: [+0x[...]] __1 : 900 [Type: unsigned __int64] - +// // cdb-command: dx niche_w_fields_2_none,d // cdb-check: niche_w_fields_2_none,d : B [Type: enum2$] // cdb-check: [+0x[...]] __0 : 1000 [Type: unsigned __int64] - +// // cdb-command: dx niche_w_fields_3_some,d // cdb-check: niche_w_fields_3_some,d : A [Type: enum2$] // cdb-check: [+0x[...]] __0 : 137 [Type: unsigned char] // cdb-check: [+0x[...]] __1 : true [Type: bool] - +// // cdb-command: dx niche_w_fields_3_niche1,d // cdb-check: niche_w_fields_3_niche1,d : B [Type: enum2$] // cdb-check: [+0x[...]] __0 : 12 [Type: unsigned char] - +// // cdb-command: dx niche_w_fields_3_niche2,d // cdb-check: niche_w_fields_3_niche2,d : C [Type: enum2$] // cdb-check: [+0x[...]] __0 : false [Type: bool] - +// // cdb-command: dx niche_w_fields_3_niche3,d // cdb-check: niche_w_fields_3_niche3,d : D [Type: enum2$] // cdb-check: [+0x[...]] __0 : 34 [Type: unsigned char] - +// // cdb-command: dx niche_w_fields_3_niche4,d // cdb-check: niche_w_fields_3_niche4,d : E [Type: enum2$] // cdb-check: [+0x[...]] __0 : 56 [Type: unsigned char] - +// // cdb-command: dx niche_w_fields_3_niche5,d // cdb-check: niche_w_fields_3_niche5,d : F [Type: enum2$] - +// // cdb-command: dx -r3 niche_w_fields_std_result_ok,d // cdb-check: niche_w_fields_std_result_ok,d : Ok [Type: enum2$,alloc::alloc::Global>,u64> >] // cdb-check: [+0x[...]] __0 [Type: alloc::boxed::Box,alloc::alloc::Global>] // cdb-check: [+0x[...]] data_ptr : [...] // cdb-check: [+0x[...]] length : 3 [...] 
- +// // cdb-command: dx -r3 niche_w_fields_std_result_err,d // cdb-check: niche_w_fields_std_result_err,d : Err [Type: enum2$,alloc::alloc::Global>,u64> >] // cdb-check: [+0x[...]] __0 : 789 [Type: unsigned __int64] - +// // cdb-command: dx -r2 arbitrary_discr1,d // cdb-check: arbitrary_discr1,d : Abc [Type: enum2$] // cdb-check: [+0x[...]] __0 : 1234 [Type: unsigned int] - +// // cdb-command: dx -r2 arbitrary_discr2,d // cdb-check: arbitrary_discr2,d : Def [Type: enum2$] // cdb-check: [+0x[...]] __0 : 5678 [Type: unsigned int] - +#![feature(generic_nonzero)] #![feature(rustc_attrs)] #![feature(repr128)] #![feature(arbitrary_enum_discriminant)] -use std::num::{NonZeroI128, NonZeroU32}; +use std::num::NonZero; pub enum CStyleEnum { Low = 2, @@ -160,7 +160,7 @@ enum NicheLayoutWithFields1<'a> { } enum NicheLayoutWithFields2 { - A(NonZeroU32, u64), + A(NonZero, u64), B(u64), } @@ -210,8 +210,8 @@ fn main() { let j = CStyleEnum::High; let k = Some("IAMA optional string!".to_string()); let l = Result::::Ok(42); - let niche128_some = Some(NonZeroI128::new(123456).unwrap()); - let niche128_none: Option = None; + let niche128_some = NonZero::new(123456i128); + let niche128_none: Option> = None; let wrapping_niche128_untagged = unsafe { Wrapping128Niche::X(Wrapping128(340282366920938463463374607431768211454)) }; @@ -224,7 +224,7 @@ fn main() { let niche_w_fields_1_some = NicheLayoutWithFields1::A(&77, 7); let niche_w_fields_1_none = NicheLayoutWithFields1::B(99); - let niche_w_fields_2_some = NicheLayoutWithFields2::A(NonZeroU32::new(800).unwrap(), 900); + let niche_w_fields_2_some = NicheLayoutWithFields2::A(NonZero::new(800).unwrap(), 900); let niche_w_fields_2_none = NicheLayoutWithFields2::B(1000); let niche_w_fields_3_some = NicheLayoutWithFields3::A(137, true); diff --git a/tests/debuginfo/numeric-types.rs b/tests/debuginfo/numeric-types.rs index e3df1fbc520..74c9e5e1dc3 100644 --- a/tests/debuginfo/numeric-types.rs +++ b/tests/debuginfo/numeric-types.rs @@ -237,25 +237,25 @@ // lldb-command:print nz_usize // lldb-check:[...]$11 = 122 { __0 = 122 } - +#![feature(generic_nonzero)] use std::num::*; use std::sync::atomic::*; fn main() { - let nz_i8 = NonZeroI8::new(11).unwrap(); - let nz_i16 = NonZeroI16::new(22).unwrap(); - let nz_i32 = NonZeroI32::new(33).unwrap(); - let nz_i64 = NonZeroI64::new(44).unwrap(); - let nz_i128 = NonZeroI128::new(55).unwrap(); - let nz_isize = NonZeroIsize::new(66).unwrap(); + let nz_i8 = NonZero::new(11i8).unwrap(); + let nz_i16 = NonZero::new(22i16).unwrap(); + let nz_i32 = NonZero::new(33i32).unwrap(); + let nz_i64 = NonZero::new(44i64).unwrap(); + let nz_i128 = NonZero::new(55i128).unwrap(); + let nz_isize = NonZero::new(66isize).unwrap(); - let nz_u8 = NonZeroU8::new(77).unwrap(); - let nz_u16 = NonZeroU16::new(88).unwrap(); - let nz_u32 = NonZeroU32::new(99).unwrap(); - let nz_u64 = NonZeroU64::new(100).unwrap(); - let nz_u128 = NonZeroU128::new(111).unwrap(); - let nz_usize = NonZeroUsize::new(122).unwrap(); + let nz_u8 = NonZero::new(77u8).unwrap(); + let nz_u16 = NonZero::new(88u16).unwrap(); + let nz_u32 = NonZero::new(99u32).unwrap(); + let nz_u64 = NonZero::new(100u64).unwrap(); + let nz_u128 = NonZero::new(111u128).unwrap(); + let nz_usize = NonZero::new(122usize).unwrap(); let w_i8 = Wrapping(10i8); let w_i16 = Wrapping(20i16); diff --git a/tests/mir-opt/instsimplify/combine_transmutes.rs b/tests/mir-opt/instsimplify/combine_transmutes.rs index 7f45ebf2c86..3707ee17690 100644 --- a/tests/mir-opt/instsimplify/combine_transmutes.rs +++ 
b/tests/mir-opt/instsimplify/combine_transmutes.rs
@@ -1,9 +1,9 @@
 //@ unit-test: InstSimplify
 //@ compile-flags: -C panic=abort
-
 #![crate_type = "lib"]
 #![feature(core_intrinsics)]
 #![feature(custom_mir)]
+#![feature(generic_nonzero)]
 
 use std::intrinsics::mir::*;
 use std::mem::{MaybeUninit, ManuallyDrop, transmute};
@@ -54,7 +54,7 @@ pub unsafe fn adt_transmutes() {
     // CHECK: as i32 (Transmute);
     // CHECK: ({{_.*}}.1: std::mem::ManuallyDrop);
 
-    let _a: u8 = transmute(Some(std::num::NonZeroU8::MAX));
+    let _a: u8 = transmute(Some(std::num::NonZero::<u8>::MAX));
     let _a: i16 = transmute(std::num::Wrapping(0_i16));
     let _a: u16 = transmute(std::num::Wrapping(0_i16));
     let _a: u32 = transmute(Union32 { i32: 0 });
diff --git a/tests/rustdoc/type-layout.rs b/tests/rustdoc/type-layout.rs
index 05f8e4dc9e9..b2ff4add63e 100644
--- a/tests/rustdoc/type-layout.rs
+++ b/tests/rustdoc/type-layout.rs
@@ -81,7 +81,7 @@ pub enum Variants {
 // @hasraw - 'Some: 4 bytes'
 pub enum WithNiche {
     None,
-    Some(std::num::NonZeroU32),
+    Some(std::num::NonZero<u32>),
 }
 
 // @hasraw type_layout/enum.Uninhabited.html 'Size: '
diff --git a/tests/ui/abi/compatibility.rs b/tests/ui/abi/compatibility.rs
index 911438e0d54..fcf31aa970c 100644
--- a/tests/ui/abi/compatibility.rs
+++ b/tests/ui/abi/compatibility.rs
@@ -64,6 +64,7 @@
 [csky] needs-llvm-components: csky
 */
 #![feature(rustc_attrs, unsized_fn_params, transparent_unions)]
+#![cfg_attr(host, feature(generic_nonzero))]
 #![cfg_attr(not(host), feature(no_core, lang_items), no_std, no_core)]
 #![allow(unused, improper_ctypes_definitions, internal_features)]
 
@@ -74,7 +75,7 @@
 
 #[cfg(host)]
 use std::{
-    any::Any, marker::PhantomData, mem::ManuallyDrop, num::NonZeroI32, ptr::NonNull, rc::Rc,
+    any::Any, marker::PhantomData, mem::ManuallyDrop, num::NonZero, ptr::NonNull, rc::Rc,
     sync::Arc,
 };
 
@@ -145,7 +146,7 @@ mod prelude {
     #[repr(transparent)]
    #[rustc_layout_scalar_valid_range_start(1)]
     #[rustc_nonnull_optimization_guaranteed]
-    pub struct NonZeroI32(i32);
+    pub struct NonZero<T>(T);
 
     // This just stands in for a non-trivial type.
     pub struct Vec<T> {
@@ -274,7 +275,7 @@ test_abi_compatible!(isize_int, isize, i64);
 test_abi_compatible!(zst_unit, Zst, ());
 #[cfg(not(any(target_arch = "sparc64")))]
 test_abi_compatible!(zst_array, Zst, [u8; 0]);
-test_abi_compatible!(nonzero_int, NonZeroI32, i32);
+test_abi_compatible!(nonzero_int, NonZero<i32>, i32);
 
 // `DispatchFromDyn` relies on ABI compatibility.
 // This is interesting since these types are not `repr(transparent)`.
So this is not part of our @@ -381,6 +382,6 @@ test_nonnull!(mut_unsized, &mut [i32]); test_nonnull!(fn_, fn()); test_nonnull!(nonnull, NonNull); test_nonnull!(nonnull_unsized, NonNull); -test_nonnull!(non_zero, NonZeroI32); +test_nonnull!(non_zero, NonZero); fn main() {} diff --git a/tests/ui/consts/const-eval/raw-bytes.32bit.stderr b/tests/ui/consts/const-eval/raw-bytes.32bit.stderr index 57815e6af65..c06c3074116 100644 --- a/tests/ui/consts/const-eval/raw-bytes.32bit.stderr +++ b/tests/ui/consts/const-eval/raw-bytes.32bit.stderr @@ -1,5 +1,5 @@ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:22:1 + --> $DIR/raw-bytes.rs:21:1 | LL | const BAD_ENUM: Enum = unsafe { mem::transmute(1usize) }; | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered 0x00000001, but expected a valid enum tag @@ -10,7 +10,7 @@ LL | const BAD_ENUM: Enum = unsafe { mem::transmute(1usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:30:1 + --> $DIR/raw-bytes.rs:29:1 | LL | const BAD_ENUM2: Enum2 = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered 0x00000000, but expected a valid enum tag @@ -21,7 +21,7 @@ LL | const BAD_ENUM2: Enum2 = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:44:1 + --> $DIR/raw-bytes.rs:43:1 | LL | const BAD_UNINHABITED_VARIANT1: UninhDiscriminant = unsafe { mem::transmute(1u8) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered an uninhabited enum variant @@ -32,7 +32,7 @@ LL | const BAD_UNINHABITED_VARIANT1: UninhDiscriminant = unsafe { mem::transmute } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:46:1 + --> $DIR/raw-bytes.rs:45:1 | LL | const BAD_UNINHABITED_VARIANT2: UninhDiscriminant = unsafe { mem::transmute(3u8) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered an uninhabited enum variant @@ -43,7 +43,7 @@ LL | const BAD_UNINHABITED_VARIANT2: UninhDiscriminant = unsafe { mem::transmute } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:52:1 + --> $DIR/raw-bytes.rs:51:1 | LL | const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { mem::transmute(!0u32) })); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0.1: encountered 0xffffffff, but expected a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`) @@ -54,7 +54,7 @@ LL | const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { mem::tran } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:57:1 + --> $DIR/raw-bytes.rs:56:1 | LL | const NULL_PTR: NonNull = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 @@ -65,10 +65,10 @@ LL | const NULL_PTR: NonNull = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:60:1 + --> $DIR/raw-bytes.rs:59:1 | -LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 +LL | const NULL_U8: NonZero = unsafe { mem::transmute(0u8) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 
| = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 1, align: 1) { @@ -76,10 +76,10 @@ LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:62:1 + --> $DIR/raw-bytes.rs:61:1 | -LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 +LL | const NULL_USIZE: NonZero = unsafe { mem::transmute(0usize) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 4, align: 4) { @@ -87,7 +87,7 @@ LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:68:1 + --> $DIR/raw-bytes.rs:67:1 | LL | const BAD_RANGE1: RestrictedRange1 = unsafe { RestrictedRange1(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 42, but expected something in the range 10..=30 @@ -98,7 +98,7 @@ LL | const BAD_RANGE1: RestrictedRange1 = unsafe { RestrictedRange1(42) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:74:1 + --> $DIR/raw-bytes.rs:73:1 | LL | const BAD_RANGE2: RestrictedRange2 = unsafe { RestrictedRange2(20) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 20, but expected something less or equal to 10, or greater or equal to 30 @@ -109,7 +109,7 @@ LL | const BAD_RANGE2: RestrictedRange2 = unsafe { RestrictedRange2(20) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:77:1 + --> $DIR/raw-bytes.rs:76:1 | LL | const NULL_FAT_PTR: NonNull = unsafe { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 @@ -120,7 +120,7 @@ LL | const NULL_FAT_PTR: NonNull = unsafe { } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:85:1 + --> $DIR/raw-bytes.rs:84:1 | LL | const UNALIGNED: &u16 = unsafe { mem::transmute(&[0u8; 4]) }; | ^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered an unaligned reference (required 2 byte alignment but found 1) @@ -131,7 +131,7 @@ LL | const UNALIGNED: &u16 = unsafe { mem::transmute(&[0u8; 4]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:89:1 + --> $DIR/raw-bytes.rs:88:1 | LL | const UNALIGNED_BOX: Box = unsafe { mem::transmute(&[0u8; 4]) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered an unaligned box (required 2 byte alignment but found 1) @@ -142,7 +142,7 @@ LL | const UNALIGNED_BOX: Box = unsafe { mem::transmute(&[0u8; 4]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:93:1 + --> $DIR/raw-bytes.rs:92:1 | LL | const NULL: &u16 = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^ constructing invalid value: encountered a null reference @@ -153,7 +153,7 @@ LL | const NULL: &u16 = 
unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:96:1 + --> $DIR/raw-bytes.rs:95:1 | LL | const NULL_BOX: Box = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a null box @@ -164,7 +164,7 @@ LL | const NULL_BOX: Box = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:99:1 + --> $DIR/raw-bytes.rs:98:1 | LL | const USIZE_AS_REF: &'static u8 = unsafe { mem::transmute(1337usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (0x539[noalloc] has no provenance) @@ -175,7 +175,7 @@ LL | const USIZE_AS_REF: &'static u8 = unsafe { mem::transmute(1337usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:102:1 + --> $DIR/raw-bytes.rs:101:1 | LL | const USIZE_AS_BOX: Box = unsafe { mem::transmute(1337usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling box (0x539[noalloc] has no provenance) @@ -186,7 +186,7 @@ LL | const USIZE_AS_BOX: Box = unsafe { mem::transmute(1337usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:105:1 + --> $DIR/raw-bytes.rs:104:1 | LL | const NULL_FN_PTR: fn() = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a function pointer @@ -197,7 +197,7 @@ LL | const NULL_FN_PTR: fn() = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:107:1 + --> $DIR/raw-bytes.rs:106:1 | LL | const DANGLING_FN_PTR: fn() = unsafe { mem::transmute(13usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0xd[noalloc], but expected a function pointer @@ -208,7 +208,7 @@ LL | const DANGLING_FN_PTR: fn() = unsafe { mem::transmute(13usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:109:1 + --> $DIR/raw-bytes.rs:108:1 | LL | const DATA_FN_PTR: fn() = unsafe { mem::transmute(&13) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC3, but expected a function pointer @@ -219,7 +219,7 @@ LL | const DATA_FN_PTR: fn() = unsafe { mem::transmute(&13) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:115:1 + --> $DIR/raw-bytes.rs:114:1 | LL | const BAD_BAD_REF: &Bar = unsafe { mem::transmute(1usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a reference pointing to uninhabited type Bar @@ -230,7 +230,7 @@ LL | const BAD_BAD_REF: &Bar = unsafe { mem::transmute(1usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:140:1 + --> $DIR/raw-bytes.rs:139:1 | LL | const STR_TOO_LONG: &str = unsafe { mem::transmute((&42u8, 999usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (going beyond the bounds of its allocation) @@ -241,7 +241,7 @@ LL | const STR_TOO_LONG: &str = unsafe { mem::transmute((&42u8, 999usize)) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:142:1 + --> $DIR/raw-bytes.rs:141:1 | LL | const NESTED_STR_MUCH_TOO_LONG: (&str,) = (unsafe { mem::transmute((&42, usize::MAX)) },); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered invalid reference metadata: slice is bigger than largest supported object @@ 
-252,7 +252,7 @@ LL | const NESTED_STR_MUCH_TOO_LONG: (&str,) = (unsafe { mem::transmute((&42, us } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:144:1 + --> $DIR/raw-bytes.rs:143:1 | LL | const MY_STR_MUCH_TOO_LONG: &MyStr = unsafe { mem::transmute((&42u8, usize::MAX)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered invalid reference metadata: slice is bigger than largest supported object @@ -263,7 +263,7 @@ LL | const MY_STR_MUCH_TOO_LONG: &MyStr = unsafe { mem::transmute((&42u8, usize: } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:147:1 + --> $DIR/raw-bytes.rs:146:1 | LL | const STR_NO_INIT: &str = unsafe { mem::transmute::<&[_], _>(&[MaybeUninit:: { uninit: () }]) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered uninitialized memory, but expected a string @@ -274,7 +274,7 @@ LL | const STR_NO_INIT: &str = unsafe { mem::transmute::<&[_], _>(&[MaybeUninit: } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:149:1 + --> $DIR/raw-bytes.rs:148:1 | LL | const MYSTR_NO_INIT: &MyStr = unsafe { mem::transmute::<&[_], _>(&[MaybeUninit:: { uninit: () }]) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0: encountered uninitialized memory, but expected a string @@ -285,7 +285,7 @@ LL | const MYSTR_NO_INIT: &MyStr = unsafe { mem::transmute::<&[_], _>(&[MaybeUni } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:151:1 + --> $DIR/raw-bytes.rs:150:1 | LL | const MYSTR_NO_INIT_ISSUE83182: &MyStr = unsafe { mem::transmute::<&[_], _>(&[&()]) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0: encountered a pointer, but expected a string @@ -298,7 +298,7 @@ LL | const MYSTR_NO_INIT_ISSUE83182: &MyStr = unsafe { mem::transmute::<&[_], _> = help: the absolute address of a pointer is not known at compile-time, so such operations are not supported error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:155:1 + --> $DIR/raw-bytes.rs:154:1 | LL | const SLICE_TOO_LONG: &[u8] = unsafe { mem::transmute((&42u8, 999usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (going beyond the bounds of its allocation) @@ -309,7 +309,7 @@ LL | const SLICE_TOO_LONG: &[u8] = unsafe { mem::transmute((&42u8, 999usize)) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:157:1 + --> $DIR/raw-bytes.rs:156:1 | LL | const SLICE_TOO_LONG_OVERFLOW: &[u32] = unsafe { mem::transmute((&42u32, isize::MAX)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered invalid reference metadata: slice is bigger than largest supported object @@ -320,7 +320,7 @@ LL | const SLICE_TOO_LONG_OVERFLOW: &[u32] = unsafe { mem::transmute((&42u32, is } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:160:1 + --> $DIR/raw-bytes.rs:159:1 | LL | const SLICE_TOO_LONG_BOX: Box<[u8]> = unsafe { mem::transmute((&42u8, 999usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling box (going beyond the bounds of its allocation) @@ -331,7 +331,7 @@ LL | const SLICE_TOO_LONG_BOX: Box<[u8]> = unsafe { mem::transmute((&42u8, 999us } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:163:1 + --> $DIR/raw-bytes.rs:162:1 | LL | const SLICE_CONTENT_INVALID: &[bool] = &[unsafe { mem::transmute(3u8) }]; | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered 0x03, but expected a boolean @@ -342,13 +342,13 @@ LL | const SLICE_CONTENT_INVALID: &[bool] = &[unsafe { mem::transmute(3u8) }]; } note: erroneous constant encountered - --> $DIR/raw-bytes.rs:163:40 + --> $DIR/raw-bytes.rs:162:40 | LL | const SLICE_CONTENT_INVALID: &[bool] = &[unsafe { mem::transmute(3u8) }]; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:169:1 + --> $DIR/raw-bytes.rs:168:1 | LL | const MYSLICE_PREFIX_BAD: &MySliceBool = &MySlice(unsafe { mem::transmute(3u8) }, [false]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0: encountered 0x03, but expected a boolean @@ -359,13 +359,13 @@ LL | const MYSLICE_PREFIX_BAD: &MySliceBool = &MySlice(unsafe { mem::transmute(3 } note: erroneous constant encountered - --> $DIR/raw-bytes.rs:169:42 + --> $DIR/raw-bytes.rs:168:42 | LL | const MYSLICE_PREFIX_BAD: &MySliceBool = &MySlice(unsafe { mem::transmute(3u8) }, [false]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:173:1 + --> $DIR/raw-bytes.rs:172:1 | LL | const MYSLICE_SUFFIX_BAD: &MySliceBool = &MySlice(true, [unsafe { mem::transmute(3u8) }]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..1[0]: encountered 0x03, but expected a boolean @@ -376,13 +376,13 @@ LL | const MYSLICE_SUFFIX_BAD: &MySliceBool = &MySlice(true, [unsafe { mem::tran } note: erroneous constant encountered - --> $DIR/raw-bytes.rs:173:42 + --> $DIR/raw-bytes.rs:172:42 | LL | const MYSLICE_SUFFIX_BAD: &MySliceBool = &MySlice(true, [unsafe { mem::transmute(3u8) }]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:178:1 + --> $DIR/raw-bytes.rs:177:1 | LL | const TRAIT_OBJ_SHORT_VTABLE_1: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &3u8))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC17, but expected a vtable pointer @@ -393,7 +393,7 @@ LL | const TRAIT_OBJ_SHORT_VTABLE_1: W<&dyn Trait> = unsafe { mem::transmute(W(( } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:182:1 + --> $DIR/raw-bytes.rs:181:1 | LL | const TRAIT_OBJ_SHORT_VTABLE_2: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &3u64))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC19, but expected a vtable pointer @@ -404,7 +404,7 @@ LL | const TRAIT_OBJ_SHORT_VTABLE_2: W<&dyn Trait> = unsafe { mem::transmute(W(( } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:186:1 + --> $DIR/raw-bytes.rs:185:1 | LL | const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, 4usize))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered 0x4[noalloc], but expected a vtable pointer @@ -415,7 +415,7 @@ LL | const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:189:1 + --> $DIR/raw-bytes.rs:188:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &[&42u8; 8]))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC22, but expected a vtable pointer @@ -426,7 
+426,7 @@ LL | const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::trans } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:193:1 + --> $DIR/raw-bytes.rs:192:1 | LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, &bool>(&3u8) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..: encountered 0x03, but expected a boolean @@ -437,7 +437,7 @@ LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:197:1 + --> $DIR/raw-bytes.rs:196:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer @@ -448,7 +448,7 @@ LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:199:1 + --> $DIR/raw-bytes.rs:198:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC27, but expected a vtable pointer @@ -459,7 +459,7 @@ LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transm } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:203:1 + --> $DIR/raw-bytes.rs:202:1 | LL | const _: &[!; 1] = unsafe { &*(1_usize as *const [!; 1]) }; | ^^^^^^^^^^^^^^^^ constructing invalid value: encountered a reference pointing to uninhabited type [!; 1] @@ -470,7 +470,7 @@ LL | const _: &[!; 1] = unsafe { &*(1_usize as *const [!; 1]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:204:1 + --> $DIR/raw-bytes.rs:203:1 | LL | const _: &[!] = unsafe { &*(1_usize as *const [!; 1]) }; | ^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a value of the never type `!` @@ -481,7 +481,7 @@ LL | const _: &[!] = unsafe { &*(1_usize as *const [!; 1]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:205:1 + --> $DIR/raw-bytes.rs:204:1 | LL | const _: &[!] = unsafe { &*(1_usize as *const [!; 42]) }; | ^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a value of the never type `!` @@ -492,7 +492,7 @@ LL | const _: &[!] 
= unsafe { &*(1_usize as *const [!; 42]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:209:1 + --> $DIR/raw-bytes.rs:208:1 | LL | pub static S4: &[u8] = unsafe { from_raw_parts((&D1) as *const _ as _, 1) }; | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered uninitialized memory, but expected an integer @@ -503,7 +503,7 @@ LL | pub static S4: &[u8] = unsafe { from_raw_parts((&D1) as *const _ as _, 1) } } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:212:1 + --> $DIR/raw-bytes.rs:211:1 | LL | pub static S5: &[u8] = unsafe { from_raw_parts((&D3) as *const _ as _, mem::size_of::<&u32>()) }; | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a pointer, but expected an integer @@ -516,7 +516,7 @@ LL | pub static S5: &[u8] = unsafe { from_raw_parts((&D3) as *const _ as _, mem: = help: the absolute address of a pointer is not known at compile-time, so such operations are not supported error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:215:1 + --> $DIR/raw-bytes.rs:214:1 | LL | pub static S6: &[bool] = unsafe { from_raw_parts((&D0) as *const _ as _, 4) }; | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered 0x11, but expected a boolean @@ -527,7 +527,7 @@ LL | pub static S6: &[bool] = unsafe { from_raw_parts((&D0) as *const _ as _, 4) } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:219:1 + --> $DIR/raw-bytes.rs:218:1 | LL | pub static S7: &[u16] = unsafe { | ^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[1]: encountered uninitialized memory, but expected an integer @@ -538,7 +538,7 @@ LL | pub static S7: &[u16] = unsafe { } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:226:1 + --> $DIR/raw-bytes.rs:225:1 | LL | pub static R4: &[u8] = unsafe { | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered uninitialized memory, but expected an integer @@ -549,7 +549,7 @@ LL | pub static R4: &[u8] = unsafe { } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:231:1 + --> $DIR/raw-bytes.rs:230:1 | LL | pub static R5: &[u8] = unsafe { | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a pointer, but expected an integer @@ -562,7 +562,7 @@ LL | pub static R5: &[u8] = unsafe { = help: the absolute address of a pointer is not known at compile-time, so such operations are not supported error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:236:1 + --> $DIR/raw-bytes.rs:235:1 | LL | pub static R6: &[bool] = unsafe { | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered 0x11, but expected a boolean diff --git a/tests/ui/consts/const-eval/raw-bytes.64bit.stderr b/tests/ui/consts/const-eval/raw-bytes.64bit.stderr index c875d91ccb8..0589280524c 100644 --- a/tests/ui/consts/const-eval/raw-bytes.64bit.stderr +++ b/tests/ui/consts/const-eval/raw-bytes.64bit.stderr @@ -1,5 +1,5 @@ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:22:1 + --> $DIR/raw-bytes.rs:21:1 | LL | const BAD_ENUM: Enum = unsafe { mem::transmute(1usize) }; | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered 0x0000000000000001, but expected a valid enum tag @@ -10,7 +10,7 @@ LL | const BAD_ENUM: Enum = unsafe { mem::transmute(1usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:30:1 + --> $DIR/raw-bytes.rs:29:1 | LL | 
const BAD_ENUM2: Enum2 = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered 0x0000000000000000, but expected a valid enum tag @@ -21,7 +21,7 @@ LL | const BAD_ENUM2: Enum2 = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:44:1 + --> $DIR/raw-bytes.rs:43:1 | LL | const BAD_UNINHABITED_VARIANT1: UninhDiscriminant = unsafe { mem::transmute(1u8) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered an uninhabited enum variant @@ -32,7 +32,7 @@ LL | const BAD_UNINHABITED_VARIANT1: UninhDiscriminant = unsafe { mem::transmute } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:46:1 + --> $DIR/raw-bytes.rs:45:1 | LL | const BAD_UNINHABITED_VARIANT2: UninhDiscriminant = unsafe { mem::transmute(3u8) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered an uninhabited enum variant @@ -43,7 +43,7 @@ LL | const BAD_UNINHABITED_VARIANT2: UninhDiscriminant = unsafe { mem::transmute } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:52:1 + --> $DIR/raw-bytes.rs:51:1 | LL | const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { mem::transmute(!0u32) })); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0.1: encountered 0xffffffff, but expected a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`) @@ -54,7 +54,7 @@ LL | const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { mem::tran } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:57:1 + --> $DIR/raw-bytes.rs:56:1 | LL | const NULL_PTR: NonNull = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 @@ -65,10 +65,10 @@ LL | const NULL_PTR: NonNull = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:60:1 + --> $DIR/raw-bytes.rs:59:1 | -LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 +LL | const NULL_U8: NonZero = unsafe { mem::transmute(0u8) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 1, align: 1) { @@ -76,10 +76,10 @@ LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:62:1 + --> $DIR/raw-bytes.rs:61:1 | -LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 +LL | const NULL_USIZE: NonZero = unsafe { mem::transmute(0usize) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. 
Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 8) { @@ -87,7 +87,7 @@ LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:68:1 + --> $DIR/raw-bytes.rs:67:1 | LL | const BAD_RANGE1: RestrictedRange1 = unsafe { RestrictedRange1(42) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 42, but expected something in the range 10..=30 @@ -98,7 +98,7 @@ LL | const BAD_RANGE1: RestrictedRange1 = unsafe { RestrictedRange1(42) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:74:1 + --> $DIR/raw-bytes.rs:73:1 | LL | const BAD_RANGE2: RestrictedRange2 = unsafe { RestrictedRange2(20) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 20, but expected something less or equal to 10, or greater or equal to 30 @@ -109,7 +109,7 @@ LL | const BAD_RANGE2: RestrictedRange2 = unsafe { RestrictedRange2(20) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:77:1 + --> $DIR/raw-bytes.rs:76:1 | LL | const NULL_FAT_PTR: NonNull = unsafe { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1 @@ -120,7 +120,7 @@ LL | const NULL_FAT_PTR: NonNull = unsafe { } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:85:1 + --> $DIR/raw-bytes.rs:84:1 | LL | const UNALIGNED: &u16 = unsafe { mem::transmute(&[0u8; 4]) }; | ^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered an unaligned reference (required 2 byte alignment but found 1) @@ -131,7 +131,7 @@ LL | const UNALIGNED: &u16 = unsafe { mem::transmute(&[0u8; 4]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:89:1 + --> $DIR/raw-bytes.rs:88:1 | LL | const UNALIGNED_BOX: Box = unsafe { mem::transmute(&[0u8; 4]) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered an unaligned box (required 2 byte alignment but found 1) @@ -142,7 +142,7 @@ LL | const UNALIGNED_BOX: Box = unsafe { mem::transmute(&[0u8; 4]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:93:1 + --> $DIR/raw-bytes.rs:92:1 | LL | const NULL: &u16 = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^ constructing invalid value: encountered a null reference @@ -153,7 +153,7 @@ LL | const NULL: &u16 = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:96:1 + --> $DIR/raw-bytes.rs:95:1 | LL | const NULL_BOX: Box = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a null box @@ -164,7 +164,7 @@ LL | const NULL_BOX: Box = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:99:1 + --> $DIR/raw-bytes.rs:98:1 | LL | const USIZE_AS_REF: &'static u8 = unsafe { mem::transmute(1337usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (0x539[noalloc] has no provenance) @@ -175,7 +175,7 @@ LL | const USIZE_AS_REF: &'static u8 = unsafe { mem::transmute(1337usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:102:1 + --> $DIR/raw-bytes.rs:101:1 | LL | const USIZE_AS_BOX: Box = unsafe { 
mem::transmute(1337usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling box (0x539[noalloc] has no provenance) @@ -186,7 +186,7 @@ LL | const USIZE_AS_BOX: Box = unsafe { mem::transmute(1337usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:105:1 + --> $DIR/raw-bytes.rs:104:1 | LL | const NULL_FN_PTR: fn() = unsafe { mem::transmute(0usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a function pointer @@ -197,7 +197,7 @@ LL | const NULL_FN_PTR: fn() = unsafe { mem::transmute(0usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:107:1 + --> $DIR/raw-bytes.rs:106:1 | LL | const DANGLING_FN_PTR: fn() = unsafe { mem::transmute(13usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0xd[noalloc], but expected a function pointer @@ -208,7 +208,7 @@ LL | const DANGLING_FN_PTR: fn() = unsafe { mem::transmute(13usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:109:1 + --> $DIR/raw-bytes.rs:108:1 | LL | const DATA_FN_PTR: fn() = unsafe { mem::transmute(&13) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC3, but expected a function pointer @@ -219,7 +219,7 @@ LL | const DATA_FN_PTR: fn() = unsafe { mem::transmute(&13) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:115:1 + --> $DIR/raw-bytes.rs:114:1 | LL | const BAD_BAD_REF: &Bar = unsafe { mem::transmute(1usize) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a reference pointing to uninhabited type Bar @@ -230,7 +230,7 @@ LL | const BAD_BAD_REF: &Bar = unsafe { mem::transmute(1usize) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:140:1 + --> $DIR/raw-bytes.rs:139:1 | LL | const STR_TOO_LONG: &str = unsafe { mem::transmute((&42u8, 999usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (going beyond the bounds of its allocation) @@ -241,7 +241,7 @@ LL | const STR_TOO_LONG: &str = unsafe { mem::transmute((&42u8, 999usize)) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:142:1 + --> $DIR/raw-bytes.rs:141:1 | LL | const NESTED_STR_MUCH_TOO_LONG: (&str,) = (unsafe { mem::transmute((&42, usize::MAX)) },); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered invalid reference metadata: slice is bigger than largest supported object @@ -252,7 +252,7 @@ LL | const NESTED_STR_MUCH_TOO_LONG: (&str,) = (unsafe { mem::transmute((&42, us } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:144:1 + --> $DIR/raw-bytes.rs:143:1 | LL | const MY_STR_MUCH_TOO_LONG: &MyStr = unsafe { mem::transmute((&42u8, usize::MAX)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered invalid reference metadata: slice is bigger than largest supported object @@ -263,7 +263,7 @@ LL | const MY_STR_MUCH_TOO_LONG: &MyStr = unsafe { mem::transmute((&42u8, usize: } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:147:1 + --> $DIR/raw-bytes.rs:146:1 | LL | const STR_NO_INIT: &str = unsafe { mem::transmute::<&[_], _>(&[MaybeUninit:: { uninit: () }]) }; | ^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .: encountered uninitialized memory, but expected a string @@ -274,7 +274,7 @@ LL | const STR_NO_INIT: &str = unsafe { 
mem::transmute::<&[_], _>(&[MaybeUninit: } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:149:1 + --> $DIR/raw-bytes.rs:148:1 | LL | const MYSTR_NO_INIT: &MyStr = unsafe { mem::transmute::<&[_], _>(&[MaybeUninit:: { uninit: () }]) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0: encountered uninitialized memory, but expected a string @@ -285,7 +285,7 @@ LL | const MYSTR_NO_INIT: &MyStr = unsafe { mem::transmute::<&[_], _>(&[MaybeUni } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:151:1 + --> $DIR/raw-bytes.rs:150:1 | LL | const MYSTR_NO_INIT_ISSUE83182: &MyStr = unsafe { mem::transmute::<&[_], _>(&[&()]) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0: encountered a pointer, but expected a string @@ -298,7 +298,7 @@ LL | const MYSTR_NO_INIT_ISSUE83182: &MyStr = unsafe { mem::transmute::<&[_], _> = help: the absolute address of a pointer is not known at compile-time, so such operations are not supported error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:155:1 + --> $DIR/raw-bytes.rs:154:1 | LL | const SLICE_TOO_LONG: &[u8] = unsafe { mem::transmute((&42u8, 999usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling reference (going beyond the bounds of its allocation) @@ -309,7 +309,7 @@ LL | const SLICE_TOO_LONG: &[u8] = unsafe { mem::transmute((&42u8, 999usize)) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:157:1 + --> $DIR/raw-bytes.rs:156:1 | LL | const SLICE_TOO_LONG_OVERFLOW: &[u32] = unsafe { mem::transmute((&42u32, isize::MAX)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered invalid reference metadata: slice is bigger than largest supported object @@ -320,7 +320,7 @@ LL | const SLICE_TOO_LONG_OVERFLOW: &[u32] = unsafe { mem::transmute((&42u32, is } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:160:1 + --> $DIR/raw-bytes.rs:159:1 | LL | const SLICE_TOO_LONG_BOX: Box<[u8]> = unsafe { mem::transmute((&42u8, 999usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered a dangling box (going beyond the bounds of its allocation) @@ -331,7 +331,7 @@ LL | const SLICE_TOO_LONG_BOX: Box<[u8]> = unsafe { mem::transmute((&42u8, 999us } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:163:1 + --> $DIR/raw-bytes.rs:162:1 | LL | const SLICE_CONTENT_INVALID: &[bool] = &[unsafe { mem::transmute(3u8) }]; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered 0x03, but expected a boolean @@ -342,13 +342,13 @@ LL | const SLICE_CONTENT_INVALID: &[bool] = &[unsafe { mem::transmute(3u8) }]; } note: erroneous constant encountered - --> $DIR/raw-bytes.rs:163:40 + --> $DIR/raw-bytes.rs:162:40 | LL | const SLICE_CONTENT_INVALID: &[bool] = &[unsafe { mem::transmute(3u8) }]; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:169:1 + --> $DIR/raw-bytes.rs:168:1 | LL | const MYSLICE_PREFIX_BAD: &MySliceBool = &MySlice(unsafe { mem::transmute(3u8) }, [false]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..0: encountered 0x03, but expected a boolean @@ -359,13 +359,13 @@ LL | const MYSLICE_PREFIX_BAD: &MySliceBool = &MySlice(unsafe { mem::transmute(3 } note: erroneous constant encountered - --> $DIR/raw-bytes.rs:169:42 + --> 
$DIR/raw-bytes.rs:168:42 | LL | const MYSLICE_PREFIX_BAD: &MySliceBool = &MySlice(unsafe { mem::transmute(3u8) }, [false]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:173:1 + --> $DIR/raw-bytes.rs:172:1 | LL | const MYSLICE_SUFFIX_BAD: &MySliceBool = &MySlice(true, [unsafe { mem::transmute(3u8) }]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..1[0]: encountered 0x03, but expected a boolean @@ -376,13 +376,13 @@ LL | const MYSLICE_SUFFIX_BAD: &MySliceBool = &MySlice(true, [unsafe { mem::tran } note: erroneous constant encountered - --> $DIR/raw-bytes.rs:173:42 + --> $DIR/raw-bytes.rs:172:42 | LL | const MYSLICE_SUFFIX_BAD: &MySliceBool = &MySlice(true, [unsafe { mem::transmute(3u8) }]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:178:1 + --> $DIR/raw-bytes.rs:177:1 | LL | const TRAIT_OBJ_SHORT_VTABLE_1: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &3u8))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC17, but expected a vtable pointer @@ -393,7 +393,7 @@ LL | const TRAIT_OBJ_SHORT_VTABLE_1: W<&dyn Trait> = unsafe { mem::transmute(W(( } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:182:1 + --> $DIR/raw-bytes.rs:181:1 | LL | const TRAIT_OBJ_SHORT_VTABLE_2: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &3u64))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC19, but expected a vtable pointer @@ -404,7 +404,7 @@ LL | const TRAIT_OBJ_SHORT_VTABLE_2: W<&dyn Trait> = unsafe { mem::transmute(W(( } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:186:1 + --> $DIR/raw-bytes.rs:185:1 | LL | const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, 4usize))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered 0x4[noalloc], but expected a vtable pointer @@ -415,7 +415,7 @@ LL | const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:189:1 + --> $DIR/raw-bytes.rs:188:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &[&42u8; 8]))) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC22, but expected a vtable pointer @@ -426,7 +426,7 @@ LL | const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::trans } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:193:1 + --> $DIR/raw-bytes.rs:192:1 | LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, &bool>(&3u8) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at ..: encountered 0x03, but expected a boolean @@ -437,7 +437,7 @@ LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:197:1 + --> $DIR/raw-bytes.rs:196:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer @@ -448,7 +448,7 @@ LL | const RAW_TRAIT_OBJ_VTABLE_NULL: 
*const dyn Trait = unsafe { mem::transmute } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:199:1 + --> $DIR/raw-bytes.rs:198:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC27, but expected a vtable pointer @@ -459,7 +459,7 @@ LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transm } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:203:1 + --> $DIR/raw-bytes.rs:202:1 | LL | const _: &[!; 1] = unsafe { &*(1_usize as *const [!; 1]) }; | ^^^^^^^^^^^^^^^^ constructing invalid value: encountered a reference pointing to uninhabited type [!; 1] @@ -470,7 +470,7 @@ LL | const _: &[!; 1] = unsafe { &*(1_usize as *const [!; 1]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:204:1 + --> $DIR/raw-bytes.rs:203:1 | LL | const _: &[!] = unsafe { &*(1_usize as *const [!; 1]) }; | ^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a value of the never type `!` @@ -481,7 +481,7 @@ LL | const _: &[!] = unsafe { &*(1_usize as *const [!; 1]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:205:1 + --> $DIR/raw-bytes.rs:204:1 | LL | const _: &[!] = unsafe { &*(1_usize as *const [!; 42]) }; | ^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a value of the never type `!` @@ -492,7 +492,7 @@ LL | const _: &[!] = unsafe { &*(1_usize as *const [!; 42]) }; } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:209:1 + --> $DIR/raw-bytes.rs:208:1 | LL | pub static S4: &[u8] = unsafe { from_raw_parts((&D1) as *const _ as _, 1) }; | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered uninitialized memory, but expected an integer @@ -503,7 +503,7 @@ LL | pub static S4: &[u8] = unsafe { from_raw_parts((&D1) as *const _ as _, 1) } } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:212:1 + --> $DIR/raw-bytes.rs:211:1 | LL | pub static S5: &[u8] = unsafe { from_raw_parts((&D3) as *const _ as _, mem::size_of::<&u32>()) }; | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a pointer, but expected an integer @@ -516,7 +516,7 @@ LL | pub static S5: &[u8] = unsafe { from_raw_parts((&D3) as *const _ as _, mem: = help: the absolute address of a pointer is not known at compile-time, so such operations are not supported error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:215:1 + --> $DIR/raw-bytes.rs:214:1 | LL | pub static S6: &[bool] = unsafe { from_raw_parts((&D0) as *const _ as _, 4) }; | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered 0x11, but expected a boolean @@ -527,7 +527,7 @@ LL | pub static S6: &[bool] = unsafe { from_raw_parts((&D0) as *const _ as _, 4) } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:219:1 + --> $DIR/raw-bytes.rs:218:1 | LL | pub static S7: &[u16] = unsafe { | ^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[1]: encountered uninitialized memory, but expected an integer @@ -538,7 +538,7 @@ LL | pub static S7: &[u16] = unsafe { } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:226:1 + --> $DIR/raw-bytes.rs:225:1 | LL | pub static R4: &[u8] = unsafe { | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered uninitialized memory, 
but expected an integer @@ -549,7 +549,7 @@ LL | pub static R4: &[u8] = unsafe { } error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:231:1 + --> $DIR/raw-bytes.rs:230:1 | LL | pub static R5: &[u8] = unsafe { | ^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered a pointer, but expected an integer @@ -562,7 +562,7 @@ LL | pub static R5: &[u8] = unsafe { = help: the absolute address of a pointer is not known at compile-time, so such operations are not supported error[E0080]: it is undefined behavior to use this value - --> $DIR/raw-bytes.rs:236:1 + --> $DIR/raw-bytes.rs:235:1 | LL | pub static R6: &[bool] = unsafe { | ^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .[0]: encountered 0x11, but expected a boolean diff --git a/tests/ui/consts/const-eval/raw-bytes.rs b/tests/ui/consts/const-eval/raw-bytes.rs index 96903b322e4..e5dfd5ca293 100644 --- a/tests/ui/consts/const-eval/raw-bytes.rs +++ b/tests/ui/consts/const-eval/raw-bytes.rs @@ -2,14 +2,13 @@ //@ ignore-endian-big // ignore-tidy-linelength //@ normalize-stderr-test "╾─*ALLOC[0-9]+(\+[a-z0-9]+)?()?─*╼" -> "╾ALLOC_ID$1╼" - -#![feature(never_type, rustc_attrs, ptr_metadata, slice_from_ptr_range, const_slice_from_ptr_range)] #![allow(invalid_value)] +#![feature(generic_nonzero, never_type, rustc_attrs, ptr_metadata, slice_from_ptr_range, const_slice_from_ptr_range)] use std::mem; use std::alloc::Layout; use std::ptr::NonNull; -use std::num::{NonZeroU8, NonZeroUsize}; +use std::num::NonZero; use std::slice::{from_ptr_range, from_raw_parts}; // # Bad enums and chars @@ -57,9 +56,9 @@ const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { mem::transmute const NULL_PTR: NonNull = unsafe { mem::transmute(0usize) }; //~^ ERROR it is undefined behavior to use this value -const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) }; +const NULL_U8: NonZero = unsafe { mem::transmute(0u8) }; //~^ ERROR it is undefined behavior to use this value -const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) }; +const NULL_USIZE: NonZero = unsafe { mem::transmute(0usize) }; //~^ ERROR it is undefined behavior to use this value #[rustc_layout_scalar_valid_range_start(10)] diff --git a/tests/ui/consts/const-eval/ub-nonnull.rs b/tests/ui/consts/const-eval/ub-nonnull.rs index 229ce9a7df3..76bd5248ffd 100644 --- a/tests/ui/consts/const-eval/ub-nonnull.rs +++ b/tests/ui/consts/const-eval/ub-nonnull.rs @@ -1,12 +1,12 @@ // Strip out raw byte dumps to make comparison platform-independent: //@ normalize-stderr-test "(the raw bytes of the constant) \(size: [0-9]*, align: [0-9]*\)" -> "$1 (size: $$SIZE, align: $$ALIGN)" //@ normalize-stderr-test "([0-9a-f][0-9a-f] |╾─*ALLOC[0-9]+(\+[a-z0-9]+)?─*╼ )+ *│.*" -> "HEX_DUMP" -#![feature(rustc_attrs, ptr_metadata)] #![allow(invalid_value)] // make sure we cannot allow away the errors tested here +#![feature(generic_nonzero, rustc_attrs, ptr_metadata)] use std::mem; use std::ptr::NonNull; -use std::num::{NonZeroU8, NonZeroUsize}; +use std::num::NonZero; const NON_NULL: NonNull = unsafe { mem::transmute(1usize) }; const NON_NULL_PTR: NonNull = unsafe { mem::transmute(&1) }; @@ -21,9 +21,9 @@ const OUT_OF_BOUNDS_PTR: NonNull = { unsafe { mem::transmute(out_of_bounds_ptr) } }; -const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) }; +const NULL_U8: NonZero = unsafe { mem::transmute(0u8) }; //~^ ERROR it is undefined behavior to use this value -const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) }; +const NULL_USIZE: NonZero = unsafe { 
mem::transmute(0usize) };
 //~^ ERROR it is undefined behavior to use this value
 
 #[repr(C)]
@@ -31,7 +31,7 @@ union MaybeUninit<T> {
     uninit: (),
     init: T,
 }
 
-const UNINIT: NonZeroU8 = unsafe { MaybeUninit { uninit: () }.init };
+const UNINIT: NonZero<u8> = unsafe { MaybeUninit { uninit: () }.init };
 //~^ ERROR evaluation of constant value failed
 //~| uninitialized
diff --git a/tests/ui/consts/const-eval/ub-nonnull.stderr b/tests/ui/consts/const-eval/ub-nonnull.stderr
index 7822306b654..70b961fe1cd 100644
--- a/tests/ui/consts/const-eval/ub-nonnull.stderr
+++ b/tests/ui/consts/const-eval/ub-nonnull.stderr
@@ -18,8 +18,8 @@ LL | let out_of_bounds_ptr = &ptr[255];
 error[E0080]: it is undefined behavior to use this value
 --> $DIR/ub-nonnull.rs:24:1
 |
-LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1
+LL | const NULL_U8: NonZero<u8> = unsafe { mem::transmute(0u8) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1
 |
 = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
 = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) {
@@ -29,8 +29,8 @@ LL | const NULL_U8: NonZeroU8 = unsafe { mem::transmute(0u8) };
 error[E0080]: it is undefined behavior to use this value
 --> $DIR/ub-nonnull.rs:26:1
 |
-LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1
+LL | const NULL_USIZE: NonZero<usize> = unsafe { mem::transmute(0usize) };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered 0, but expected something greater or equal to 1
 |
 = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
 = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) {
@@ -38,10 +38,10 @@ LL | const NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(0usize) };
 }
 
 error[E0080]: evaluation of constant value failed
- --> $DIR/ub-nonnull.rs:34:36
+ --> $DIR/ub-nonnull.rs:34:38
 |
-LL | const UNINIT: NonZeroU8 = unsafe { MaybeUninit { uninit: () }.init };
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using uninitialized data, but this operation requires initialized memory
+LL | const UNINIT: NonZero<u8> = unsafe { MaybeUninit { uninit: () }.init };
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using uninitialized data, but this operation requires initialized memory
 
 error[E0080]: it is undefined behavior to use this value
 --> $DIR/ub-nonnull.rs:43:1
diff --git a/tests/ui/consts/const-eval/valid-const.rs b/tests/ui/consts/const-eval/valid-const.rs
index 1c8c048ae28..15d3e883456 100644
--- a/tests/ui/consts/const-eval/valid-const.rs
+++ b/tests/ui/consts/const-eval/valid-const.rs
@@ -1,16 +1,17 @@
 //@ check-pass
-
+//
 // Some constants that *are* valid
+#![feature(generic_nonzero)]
 
 use std::mem;
 use std::ptr::NonNull;
-use std::num::{NonZeroU8, NonZeroUsize};
+use std::num::NonZero;
 
 const NON_NULL_PTR1: NonNull = unsafe { mem::transmute(1usize) };
 const NON_NULL_PTR2: NonNull = unsafe { mem::transmute(&0) };
 
-const NON_NULL_U8: NonZeroU8 = unsafe { mem::transmute(1u8) };
-const NON_NULL_USIZE: NonZeroUsize = unsafe { mem::transmute(1usize) };
+const NON_NULL_U8: NonZero<u8> = unsafe { mem::transmute(1u8) };
+const NON_NULL_USIZE: NonZero<usize> = unsafe { mem::transmute(1usize) };
 
 const UNIT: () = ();
 
diff --git a/tests/ui/consts/ice-48279.rs b/tests/ui/consts/ice-48279.rs
index 5316974b80a..a4f687a3e9c 100644
--- a/tests/ui/consts/ice-48279.rs
+++ b/tests/ui/consts/ice-48279.rs
@@ -15,7 +15,7 @@ impl NonZeroU32 {
     }
 }
 
-//pub const FOO_ATOM: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(7) };
+// pub const FOO_ATOM: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(7) };
 pub const FOO_ATOM: NonZeroU32 = unsafe { NonZeroU32 { value: 7 } };
 
 fn main() {
diff --git a/tests/ui/consts/tuple-struct-constructors.rs b/tests/ui/consts/tuple-struct-constructors.rs
index 8472a09844b..d2f25aeec9b 100644
--- a/tests/ui/consts/tuple-struct-constructors.rs
+++ b/tests/ui/consts/tuple-struct-constructors.rs
@@ -1,10 +1,11 @@
 //@ run-pass
-
+//
 // https://github.com/rust-lang/rust/issues/41898
+#![feature(generic_nonzero)]
 
-use std::num::NonZeroU64;
+use std::num::NonZero;
 
 fn main() {
-    const FOO: NonZeroU64 = unsafe { NonZeroU64::new_unchecked(2) };
+    const FOO: NonZero<u64> = unsafe { NonZero::new_unchecked(2) };
     if let FOO = FOO {}
 }
diff --git a/tests/ui/intrinsics/panic-uninitialized-zeroed.rs b/tests/ui/intrinsics/panic-uninitialized-zeroed.rs
index 2420aec5058..fb3b0652ddb 100644
--- a/tests/ui/intrinsics/panic-uninitialized-zeroed.rs
+++ b/tests/ui/intrinsics/panic-uninitialized-zeroed.rs
@@ -4,11 +4,11 @@
 // ignore-tidy-linelength
 //@ ignore-emscripten spawning processes is not supported
 //@ ignore-sgx no processes
-
+//
 // This test checks panic emitted from `mem::{uninitialized,zeroed}`.
- -#![feature(never_type)] #![allow(deprecated, invalid_value)] +#![feature(generic_nonzero)] +#![feature(never_type)] use std::{ mem::{self, MaybeUninit, ManuallyDrop}, @@ -29,7 +29,7 @@ enum OneVariant { Variant(i32) } #[allow(dead_code, non_camel_case_types)] enum OneVariant_NonZero { - Variant(i32, i32, num::NonZeroI32), + Variant(i32, i32, num::NonZero), DeadVariant(Bar), } @@ -55,8 +55,8 @@ enum LR { } #[allow(dead_code, non_camel_case_types)] enum LR_NonZero { - Left(num::NonZeroI64), - Right(num::NonZeroI64), + Left(num::NonZero), + Right(num::NonZero), } struct ZeroSized; diff --git a/tests/ui/issues/issue-64593.rs b/tests/ui/issues/issue-64593.rs index e5535381006..091c3a2f316 100644 --- a/tests/ui/issues/issue-64593.rs +++ b/tests/ui/issues/issue-64593.rs @@ -1,7 +1,8 @@ //@ check-pass #![deny(improper_ctypes)] +#![feature(generic_nonzero)] -pub struct Error(std::num::NonZeroU32); +pub struct Error(std::num::NonZero); extern "Rust" { fn foo(dest: &mut [u8]) -> Result<(), Error>; diff --git a/tests/ui/layout/unsafe-cell-hides-niche.rs b/tests/ui/layout/unsafe-cell-hides-niche.rs index b3158839de0..568eb819be2 100644 --- a/tests/ui/layout/unsafe-cell-hides-niche.rs +++ b/tests/ui/layout/unsafe-cell-hides-niche.rs @@ -1,17 +1,17 @@ // For rust-lang/rust#68303: the contents of `UnsafeCell` cannot // participate in the niche-optimization for enum discriminants. This -// test checks that an `Option>` has the same +// test checks that an `Option>>` has the same // size in memory as an `Option>` (namely, 8 bytes). - +// //@ check-pass //@ compile-flags: --crate-type=lib //@ only-x86 - +#![feature(generic_nonzero)] #![feature(repr_simd)] use std::cell::{UnsafeCell, RefCell, Cell}; use std::mem::size_of; -use std::num::NonZeroU32 as N32; +use std::num::NonZero; use std::sync::{Mutex, RwLock}; struct Wrapper(#[allow(dead_code)] T); @@ -54,15 +54,17 @@ macro_rules! check_sizes { const PTR_SIZE: usize = std::mem::size_of::<*const ()>(); -check_sizes!(Wrapper: 4 => 8); -check_sizes!(Wrapper: 4 => 4); // (✓ niche opt) -check_sizes!(Transparent: 4 => 8); -check_sizes!(Transparent: 4 => 4); // (✓ niche opt) -check_sizes!(NoNiche: 4 => 8); -check_sizes!(NoNiche: 4 => 8); +check_sizes!(Wrapper: 4 => 8); +check_sizes!(Wrapper>: 4 => 4); // (✓ niche opt) -check_sizes!(UnsafeCell: 4 => 8); -check_sizes!(UnsafeCell: 4 => 8); +check_sizes!(Transparent: 4 => 8); +check_sizes!(Transparent>: 4 => 4); // (✓ niche opt) + +check_sizes!(NoNiche: 4 => 8); +check_sizes!(NoNiche>: 4 => 8); + +check_sizes!(UnsafeCell: 4 => 8); +check_sizes!(UnsafeCell>: 4 => 8); check_sizes!(UnsafeCell<&()>: PTR_SIZE => PTR_SIZE * 2); check_sizes!( RefCell<&()>: PTR_SIZE * 2 => PTR_SIZE * 3); @@ -79,4 +81,4 @@ check_sizes!(UnsafeCell<&dyn Trait>: PTR_SIZE * 2 => PTR_SIZE * 3); #[repr(simd)] pub struct Vec4([T; 4]); -check_sizes!(UnsafeCell>: 16 => 32); +check_sizes!(UnsafeCell>>: 16 => 32); diff --git a/tests/ui/layout/zero-sized-array-enum-niche.rs b/tests/ui/layout/zero-sized-array-enum-niche.rs index 095afc4337a..058f5923487 100644 --- a/tests/ui/layout/zero-sized-array-enum-niche.rs +++ b/tests/ui/layout/zero-sized-array-enum-niche.rs @@ -1,6 +1,7 @@ //@ normalize-stderr-test "pref: Align\([1-8] bytes\)" -> "pref: $$PREF_ALIGN" -#![feature(rustc_attrs)] #![crate_type = "lib"] +#![feature(generic_nonzero)] +#![feature(rustc_attrs)] // Various tests around the behavior of zero-sized arrays and // enum niches, especially that they have coherent size and alignment. 
@@ -34,7 +35,7 @@ enum MultipleAlignments { //~ ERROR: layout_of struct Packed(T); #[rustc_layout(debug)] -type NicheLosesToTagged = Result<[u32; 0], Packed>; //~ ERROR: layout_of +type NicheLosesToTagged = Result<[u32; 0], Packed>>; //~ ERROR: layout_of // Should get tag_encoding: Direct, size == align == 4. #[repr(u16)] diff --git a/tests/ui/layout/zero-sized-array-enum-niche.stderr b/tests/ui/layout/zero-sized-array-enum-niche.stderr index 0ed743818c5..af049125de4 100644 --- a/tests/ui/layout/zero-sized-array-enum-niche.stderr +++ b/tests/ui/layout/zero-sized-array-enum-niche.stderr @@ -98,7 +98,7 @@ error: layout_of(Result<[u32; 0], bool>) = Layout { max_repr_align: None, unadjusted_abi_align: Align(4 bytes), } - --> $DIR/zero-sized-array-enum-niche.rs:13:1 + --> $DIR/zero-sized-array-enum-niche.rs:14:1 | LL | type AlignedResult = Result<[u32; 0], bool>; | ^^^^^^^^^^^^^^^^^^ @@ -227,7 +227,7 @@ error: layout_of(MultipleAlignments) = Layout { max_repr_align: None, unadjusted_abi_align: Align(4 bytes), } - --> $DIR/zero-sized-array-enum-niche.rs:21:1 + --> $DIR/zero-sized-array-enum-niche.rs:22:1 | LL | enum MultipleAlignments { | ^^^^^^^^^^^^^^^^^^^^^^^ @@ -332,9 +332,9 @@ error: layout_of(Result<[u32; 0], Packed>>) = Layout { max_repr_align: None, unadjusted_abi_align: Align(4 bytes), } - --> $DIR/zero-sized-array-enum-niche.rs:37:1 + --> $DIR/zero-sized-array-enum-niche.rs:38:1 | -LL | type NicheLosesToTagged = Result<[u32; 0], Packed>; +LL | type NicheLosesToTagged = Result<[u32; 0], Packed>>; | ^^^^^^^^^^^^^^^^^^^^^^^ error: layout_of(Result<[u32; 0], Packed>) = Layout { @@ -441,7 +441,7 @@ error: layout_of(Result<[u32; 0], Packed>) = Layout { max_repr_align: None, unadjusted_abi_align: Align(4 bytes), } - --> $DIR/zero-sized-array-enum-niche.rs:44:1 + --> $DIR/zero-sized-array-enum-niche.rs:45:1 | LL | type NicheWinsOverTagged = Result<[u32; 0], Packed>; | ^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/lint/clashing-extern-fn.rs b/tests/ui/lint/clashing-extern-fn.rs index ce027c82554..cb63af0ea42 100644 --- a/tests/ui/lint/clashing-extern-fn.rs +++ b/tests/ui/lint/clashing-extern-fn.rs @@ -2,6 +2,7 @@ //@ aux-build:external_extern_fn.rs #![crate_type = "lib"] #![warn(clashing_extern_declarations)] +#![feature(generic_nonzero)] mod redeclared_different_signature { mod a { @@ -265,7 +266,7 @@ mod missing_return_type { mod non_zero_and_non_null { mod a { extern "C" { - fn non_zero_usize() -> core::num::NonZeroUsize; + fn non_zero_usize() -> core::num::NonZero; fn non_null_ptr() -> core::ptr::NonNull; } } @@ -285,36 +286,33 @@ mod non_zero_and_non_null { // See #75739 mod non_zero_transparent { mod a1 { - use std::num::NonZeroUsize; extern "C" { - fn f1() -> NonZeroUsize; + fn f1() -> std::num::NonZero; } } mod b1 { #[repr(transparent)] - struct X(NonZeroUsize); - use std::num::NonZeroUsize; + struct X(std::num::NonZero); + extern "C" { fn f1() -> X; } } mod a2 { - use std::num::NonZeroUsize; extern "C" { - fn f2() -> NonZeroUsize; + fn f2() -> std::num::NonZero; } } mod b2 { #[repr(transparent)] - struct X1(NonZeroUsize); + struct X1(std::num::NonZero); #[repr(transparent)] struct X(X1); - use std::num::NonZeroUsize; extern "C" { // Same case as above, but with two layers of newtyping. 
fn f2() -> X; @@ -325,7 +323,6 @@ mod non_zero_transparent { #[repr(transparent)] struct X(core::ptr::NonNull); - use std::num::NonZeroUsize; extern "C" { fn f3() -> X; } @@ -340,7 +337,7 @@ mod non_zero_transparent { mod a4 { #[repr(transparent)] enum E { - X(std::num::NonZeroUsize), + X(std::num::NonZero), } extern "C" { fn f4() -> E; @@ -349,7 +346,7 @@ mod non_zero_transparent { mod b4 { extern "C" { - fn f4() -> std::num::NonZeroUsize; + fn f4() -> std::num::NonZero; } } } @@ -369,8 +366,8 @@ mod null_optimised_enums { extern "C" { // This should be allowed, because these conversions are guaranteed to be FFI-safe (see // #60300) - fn option_non_zero_usize() -> Option; - fn option_non_zero_isize() -> Option; + fn option_non_zero_usize() -> Option>; + fn option_non_zero_isize() -> Option>; fn option_non_null_ptr() -> Option>; // However, these should be incorrect (note isize instead of usize) @@ -415,16 +412,16 @@ mod hidden_niche { } mod b { use std::cell::UnsafeCell; - use std::num::NonZeroUsize; + use std::num::NonZero; #[repr(transparent)] struct Transparent { - x: NonZeroUsize, + x: NonZero, } #[repr(transparent)] struct TransparentNoNiche { - y: UnsafeCell, + y: UnsafeCell>, } extern "C" { @@ -434,7 +431,7 @@ mod hidden_niche { //~^ WARN redeclared with a different signature //~| WARN block uses type `Option`, which is not FFI-safe - fn hidden_niche_unsafe_cell() -> Option>; + fn hidden_niche_unsafe_cell() -> Option>>; //~^ WARN redeclared with a different signature //~| WARN block uses type `Option>>`, which is not FFI-safe } diff --git a/tests/ui/lint/clashing-extern-fn.stderr b/tests/ui/lint/clashing-extern-fn.stderr index 5b9244b6993..86ee789aeb2 100644 --- a/tests/ui/lint/clashing-extern-fn.stderr +++ b/tests/ui/lint/clashing-extern-fn.stderr @@ -1,5 +1,5 @@ warning: `extern` block uses type `Option`, which is not FFI-safe - --> $DIR/clashing-extern-fn.rs:433:55 + --> $DIR/clashing-extern-fn.rs:430:55 | LL | fn hidden_niche_transparent_no_niche() -> Option; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe @@ -9,16 +9,16 @@ LL | fn hidden_niche_transparent_no_niche() -> Option>>`, which is not FFI-safe - --> $DIR/clashing-extern-fn.rs:437:46 + --> $DIR/clashing-extern-fn.rs:434:46 | -LL | fn hidden_niche_unsafe_cell() -> Option>; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe +LL | fn hidden_niche_unsafe_cell() -> Option>>; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe | = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint warning: `clash` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:14:13 + --> $DIR/clashing-extern-fn.rs:15:13 | LL | fn clash(x: u8); | --------------- `clash` previously declared here @@ -35,7 +35,7 @@ LL | #![warn(clashing_extern_declarations)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ warning: `extern_link_name` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:52:9 + --> $DIR/clashing-extern-fn.rs:53:9 | LL | #[link_name = "extern_link_name"] | --------------------------------- `extern_link_name` previously declared here @@ -47,7 +47,7 @@ LL | fn extern_link_name(x: u32); found `unsafe extern "C" fn(u32)` warning: `some_other_extern_link_name` redeclares `some_other_new_name` with a different signature - --> $DIR/clashing-extern-fn.rs:55:9 + --> $DIR/clashing-extern-fn.rs:56:9 | LL | fn some_other_new_name(x: i16); | ------------------------------ `some_other_new_name` previously declared here @@ -59,7 +59,7 @@ LL | 
#[link_name = "some_other_new_name"] found `unsafe extern "C" fn(u32)` warning: `other_both_names_different` redeclares `link_name_same` with a different signature - --> $DIR/clashing-extern-fn.rs:59:9 + --> $DIR/clashing-extern-fn.rs:60:9 | LL | #[link_name = "link_name_same"] | ------------------------------- `link_name_same` previously declared here @@ -71,7 +71,7 @@ LL | #[link_name = "link_name_same"] found `unsafe extern "C" fn(u32)` warning: `different_mod` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:72:9 + --> $DIR/clashing-extern-fn.rs:73:9 | LL | fn different_mod(x: u8); | ----------------------- `different_mod` previously declared here @@ -83,7 +83,7 @@ LL | fn different_mod(x: u64); found `unsafe extern "C" fn(u64)` warning: `variadic_decl` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:82:9 + --> $DIR/clashing-extern-fn.rs:83:9 | LL | fn variadic_decl(x: u8, ...); | ---------------------------- `variadic_decl` previously declared here @@ -95,7 +95,7 @@ LL | fn variadic_decl(x: u8); found `unsafe extern "C" fn(u8)` warning: `weigh_banana` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:142:13 + --> $DIR/clashing-extern-fn.rs:143:13 | LL | fn weigh_banana(count: *const Banana) -> u64; | -------------------------------------------- `weigh_banana` previously declared here @@ -107,7 +107,7 @@ LL | fn weigh_banana(count: *const Banana) -> u64; found `unsafe extern "C" fn(*const three::Banana) -> u64` warning: `draw_point` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:171:13 + --> $DIR/clashing-extern-fn.rs:172:13 | LL | fn draw_point(p: Point); | ----------------------- `draw_point` previously declared here @@ -119,7 +119,7 @@ LL | fn draw_point(p: Point); found `unsafe extern "C" fn(sameish_members::b::Point)` warning: `origin` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:197:13 + --> $DIR/clashing-extern-fn.rs:198:13 | LL | fn origin() -> Point3; | --------------------- `origin` previously declared here @@ -131,7 +131,7 @@ LL | fn origin() -> Point3; found `unsafe extern "C" fn() -> same_sized_members_clash::b::Point3` warning: `transparent_incorrect` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:220:13 + --> $DIR/clashing-extern-fn.rs:221:13 | LL | fn transparent_incorrect() -> T; | ------------------------------- `transparent_incorrect` previously declared here @@ -143,7 +143,7 @@ LL | fn transparent_incorrect() -> isize; found `unsafe extern "C" fn() -> isize` warning: `missing_return_type` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:259:13 + --> $DIR/clashing-extern-fn.rs:260:13 | LL | fn missing_return_type() -> usize; | --------------------------------- `missing_return_type` previously declared here @@ -155,10 +155,10 @@ LL | fn missing_return_type(); found `unsafe extern "C" fn()` warning: `non_zero_usize` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:277:13 + --> $DIR/clashing-extern-fn.rs:278:13 | -LL | fn non_zero_usize() -> core::num::NonZeroUsize; - | ---------------------------------------------- `non_zero_usize` previously declared here +LL | fn non_zero_usize() -> core::num::NonZero; + | ------------------------------------------------ `non_zero_usize` previously declared here ... 
LL | fn non_zero_usize() -> usize; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration @@ -167,7 +167,7 @@ LL | fn non_zero_usize() -> usize; found `unsafe extern "C" fn() -> usize` warning: `non_null_ptr` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:279:13 + --> $DIR/clashing-extern-fn.rs:280:13 | LL | fn non_null_ptr() -> core::ptr::NonNull; | ---------------------------------------------- `non_null_ptr` previously declared here @@ -179,7 +179,7 @@ LL | fn non_null_ptr() -> *const usize; found `unsafe extern "C" fn() -> *const usize` warning: `option_non_zero_usize_incorrect` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:377:13 + --> $DIR/clashing-extern-fn.rs:374:13 | LL | fn option_non_zero_usize_incorrect() -> usize; | --------------------------------------------- `option_non_zero_usize_incorrect` previously declared here @@ -191,7 +191,7 @@ LL | fn option_non_zero_usize_incorrect() -> isize; found `unsafe extern "C" fn() -> isize` warning: `option_non_null_ptr_incorrect` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:379:13 + --> $DIR/clashing-extern-fn.rs:376:13 | LL | fn option_non_null_ptr_incorrect() -> *const usize; | -------------------------------------------------- `option_non_null_ptr_incorrect` previously declared here @@ -203,7 +203,7 @@ LL | fn option_non_null_ptr_incorrect() -> *const isize; found `unsafe extern "C" fn() -> *const isize` warning: `hidden_niche_transparent_no_niche` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:433:13 + --> $DIR/clashing-extern-fn.rs:430:13 | LL | fn hidden_niche_transparent_no_niche() -> usize; | ----------------------------------------------- `hidden_niche_transparent_no_niche` previously declared here @@ -215,13 +215,13 @@ LL | fn hidden_niche_transparent_no_niche() -> Option Option` warning: `hidden_niche_unsafe_cell` redeclared with a different signature - --> $DIR/clashing-extern-fn.rs:437:13 + --> $DIR/clashing-extern-fn.rs:434:13 | LL | fn hidden_niche_unsafe_cell() -> usize; | -------------------------------------- `hidden_niche_unsafe_cell` previously declared here ... -LL | fn hidden_niche_unsafe_cell() -> Option>; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration +LL | fn hidden_niche_unsafe_cell() -> Option>>; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> usize` found `unsafe extern "C" fn() -> Option>>` diff --git a/tests/ui/lint/invalid_value.rs b/tests/ui/lint/invalid_value.rs index 57d8cbe7c93..1d2f23aaaf6 100644 --- a/tests/ui/lint/invalid_value.rs +++ b/tests/ui/lint/invalid_value.rs @@ -1,13 +1,12 @@ // This test checks that calling `mem::{uninitialized,zeroed}` with certain types results // in a lint. 
- -#![feature(never_type, rustc_attrs)] #![allow(deprecated)] #![deny(invalid_value)] +#![feature(generic_nonzero, never_type, rustc_attrs)] use std::mem::{self, MaybeUninit}; use std::ptr::NonNull; -use std::num::NonZeroU32; +use std::num::NonZero; enum Void {} @@ -36,7 +35,7 @@ enum OneFruit { enum OneFruitNonZero { Apple(!), - Banana(NonZeroU32), + Banana(NonZero), } enum TwoUninhabited { @@ -92,8 +91,8 @@ fn main() { let _val: NonNull = mem::zeroed(); //~ ERROR: does not permit zero-initialization let _val: NonNull = mem::uninitialized(); //~ ERROR: does not permit being left uninitialized - let _val: (NonZeroU32, i32) = mem::zeroed(); //~ ERROR: does not permit zero-initialization - let _val: (NonZeroU32, i32) = mem::uninitialized(); //~ ERROR: does not permit being left uninitialized + let _val: (NonZero, i32) = mem::zeroed(); //~ ERROR: does not permit zero-initialization + let _val: (NonZero, i32) = mem::uninitialized(); //~ ERROR: does not permit being left uninitialized let _val: *const dyn Send = mem::zeroed(); //~ ERROR: does not permit zero-initialization let _val: *const dyn Send = mem::uninitialized(); //~ ERROR: does not permit being left uninitialized @@ -151,7 +150,7 @@ fn main() { // Transmute-from-0 let _val: &'static i32 = mem::transmute(0usize); //~ ERROR: does not permit zero-initialization let _val: &'static [i32] = mem::transmute((0usize, 0usize)); //~ ERROR: does not permit zero-initialization - let _val: NonZeroU32 = mem::transmute(0); //~ ERROR: does not permit zero-initialization + let _val: NonZero = mem::transmute(0); //~ ERROR: does not permit zero-initialization // `MaybeUninit` cases let _val: NonNull = MaybeUninit::zeroed().assume_init(); //~ ERROR: does not permit zero-initialization diff --git a/tests/ui/lint/invalid_value.stderr b/tests/ui/lint/invalid_value.stderr index bdf47343114..955d01bd5d9 100644 --- a/tests/ui/lint/invalid_value.stderr +++ b/tests/ui/lint/invalid_value.stderr @@ -1,5 +1,5 @@ error: the type `&T` does not permit zero-initialization - --> $DIR/invalid_value.rs:54:32 + --> $DIR/invalid_value.rs:53:32 | LL | let _val: &'static T = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -9,13 +9,13 @@ LL | let _val: &'static T = mem::zeroed(); | = note: references must be non-null note: the lint level is defined here - --> $DIR/invalid_value.rs:6:9 + --> $DIR/invalid_value.rs:4:9 | LL | #![deny(invalid_value)] | ^^^^^^^^^^^^^ error: the type `&T` does not permit being left uninitialized - --> $DIR/invalid_value.rs:55:32 + --> $DIR/invalid_value.rs:54:32 | LL | let _val: &'static T = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -26,7 +26,7 @@ LL | let _val: &'static T = mem::uninitialized(); = note: references must be non-null error: the type `Wrap<&T>` does not permit zero-initialization - --> $DIR/invalid_value.rs:57:38 + --> $DIR/invalid_value.rs:56:38 | LL | let _val: Wrap<&'static T> = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -36,13 +36,13 @@ LL | let _val: Wrap<&'static T> = mem::zeroed(); | = note: `Wrap<&T>` must be non-null note: because references must be non-null (in this struct field) - --> $DIR/invalid_value.rs:17:18 + --> $DIR/invalid_value.rs:16:18 | LL | struct Wrap { wrapped: T } | ^^^^^^^^^^ error: the type `Wrap<&T>` does not permit being left uninitialized - --> $DIR/invalid_value.rs:58:38 + --> $DIR/invalid_value.rs:57:38 | LL | let _val: Wrap<&'static T> = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -52,13 +52,13 @@ LL | let _val: Wrap<&'static T> = mem::uninitialized(); | = note: `Wrap<&T>` must be non-null note: because references 
must be non-null (in this struct field) - --> $DIR/invalid_value.rs:17:18 + --> $DIR/invalid_value.rs:16:18 | LL | struct Wrap { wrapped: T } | ^^^^^^^^^^ error: the type `!` does not permit zero-initialization - --> $DIR/invalid_value.rs:65:23 + --> $DIR/invalid_value.rs:64:23 | LL | let _val: ! = mem::zeroed(); | ^^^^^^^^^^^^^ this code causes undefined behavior when executed @@ -66,7 +66,7 @@ LL | let _val: ! = mem::zeroed(); = note: the `!` type has no valid value error: the type `!` does not permit being left uninitialized - --> $DIR/invalid_value.rs:66:23 + --> $DIR/invalid_value.rs:65:23 | LL | let _val: ! = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed @@ -74,7 +74,7 @@ LL | let _val: ! = mem::uninitialized(); = note: the `!` type has no valid value error: the type `(i32, !)` does not permit zero-initialization - --> $DIR/invalid_value.rs:68:30 + --> $DIR/invalid_value.rs:67:30 | LL | let _val: (i32, !) = mem::zeroed(); | ^^^^^^^^^^^^^ this code causes undefined behavior when executed @@ -82,7 +82,7 @@ LL | let _val: (i32, !) = mem::zeroed(); = note: the `!` type has no valid value error: the type `(i32, !)` does not permit being left uninitialized - --> $DIR/invalid_value.rs:69:30 + --> $DIR/invalid_value.rs:68:30 | LL | let _val: (i32, !) = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed @@ -90,31 +90,31 @@ LL | let _val: (i32, !) = mem::uninitialized(); = note: integers must be initialized error: the type `Void` does not permit zero-initialization - --> $DIR/invalid_value.rs:71:26 + --> $DIR/invalid_value.rs:70:26 | LL | let _val: Void = mem::zeroed(); | ^^^^^^^^^^^^^ this code causes undefined behavior when executed | note: enums with no inhabited variants have no valid value - --> $DIR/invalid_value.rs:12:1 + --> $DIR/invalid_value.rs:11:1 | LL | enum Void {} | ^^^^^^^^^ error: the type `Void` does not permit being left uninitialized - --> $DIR/invalid_value.rs:72:26 + --> $DIR/invalid_value.rs:71:26 | LL | let _val: Void = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed | note: enums with no inhabited variants have no valid value - --> $DIR/invalid_value.rs:12:1 + --> $DIR/invalid_value.rs:11:1 | LL | enum Void {} | ^^^^^^^^^ error: the type `&i32` does not permit zero-initialization - --> $DIR/invalid_value.rs:74:34 + --> $DIR/invalid_value.rs:73:34 | LL | let _val: &'static i32 = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -125,7 +125,7 @@ LL | let _val: &'static i32 = mem::zeroed(); = note: references must be non-null error: the type `&i32` does not permit being left uninitialized - --> $DIR/invalid_value.rs:75:34 + --> $DIR/invalid_value.rs:74:34 | LL | let _val: &'static i32 = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -136,7 +136,7 @@ LL | let _val: &'static i32 = mem::uninitialized(); = note: references must be non-null error: the type `Ref` does not permit zero-initialization - --> $DIR/invalid_value.rs:77:25 + --> $DIR/invalid_value.rs:76:25 | LL | let _val: Ref = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -146,13 +146,13 @@ LL | let _val: Ref = mem::zeroed(); | = note: `Ref` must be non-null note: because references must be non-null (in this struct field) - --> $DIR/invalid_value.rs:14:12 + --> $DIR/invalid_value.rs:13:12 | LL | struct Ref(&'static i32); | ^^^^^^^^^^^^ error: the type `Ref` does not permit being left uninitialized - --> $DIR/invalid_value.rs:78:25 + --> $DIR/invalid_value.rs:77:25 | LL | let _val: Ref = 
mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -162,13 +162,13 @@ LL | let _val: Ref = mem::uninitialized(); | = note: `Ref` must be non-null note: because references must be non-null (in this struct field) - --> $DIR/invalid_value.rs:14:12 + --> $DIR/invalid_value.rs:13:12 | LL | struct Ref(&'static i32); | ^^^^^^^^^^^^ error: the type `fn()` does not permit zero-initialization - --> $DIR/invalid_value.rs:80:26 + --> $DIR/invalid_value.rs:79:26 | LL | let _val: fn() = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -179,7 +179,7 @@ LL | let _val: fn() = mem::zeroed(); = note: function pointers must be non-null error: the type `fn()` does not permit being left uninitialized - --> $DIR/invalid_value.rs:81:26 + --> $DIR/invalid_value.rs:80:26 | LL | let _val: fn() = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -190,7 +190,7 @@ LL | let _val: fn() = mem::uninitialized(); = note: function pointers must be non-null error: the type `Wrap` does not permit zero-initialization - --> $DIR/invalid_value.rs:83:32 + --> $DIR/invalid_value.rs:82:32 | LL | let _val: Wrap = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -200,13 +200,13 @@ LL | let _val: Wrap = mem::zeroed(); | = note: `Wrap` must be non-null note: because function pointers must be non-null (in this struct field) - --> $DIR/invalid_value.rs:17:18 + --> $DIR/invalid_value.rs:16:18 | LL | struct Wrap { wrapped: T } | ^^^^^^^^^^ error: the type `Wrap` does not permit being left uninitialized - --> $DIR/invalid_value.rs:84:32 + --> $DIR/invalid_value.rs:83:32 | LL | let _val: Wrap = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -216,13 +216,13 @@ LL | let _val: Wrap = mem::uninitialized(); | = note: `Wrap` must be non-null note: because function pointers must be non-null (in this struct field) - --> $DIR/invalid_value.rs:17:18 + --> $DIR/invalid_value.rs:16:18 | LL | struct Wrap { wrapped: T } | ^^^^^^^^^^ error: the type `WrapEnum` does not permit zero-initialization - --> $DIR/invalid_value.rs:86:36 + --> $DIR/invalid_value.rs:85:36 | LL | let _val: WrapEnum = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -232,13 +232,13 @@ LL | let _val: WrapEnum = mem::zeroed(); | = note: `WrapEnum` must be non-null note: because function pointers must be non-null (in this field of the only potentially inhabited enum variant) - --> $DIR/invalid_value.rs:18:28 + --> $DIR/invalid_value.rs:17:28 | LL | enum WrapEnum { Wrapped(T) } | ^ error: the type `WrapEnum` does not permit being left uninitialized - --> $DIR/invalid_value.rs:87:36 + --> $DIR/invalid_value.rs:86:36 | LL | let _val: WrapEnum = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -248,13 +248,13 @@ LL | let _val: WrapEnum = mem::uninitialized(); | = note: `WrapEnum` must be non-null note: because function pointers must be non-null (in this field of the only potentially inhabited enum variant) - --> $DIR/invalid_value.rs:18:28 + --> $DIR/invalid_value.rs:17:28 | LL | enum WrapEnum { Wrapped(T) } | ^ error: the type `Wrap<(RefPair, i32)>` does not permit zero-initialization - --> $DIR/invalid_value.rs:89:42 + --> $DIR/invalid_value.rs:88:42 | LL | let _val: Wrap<(RefPair, i32)> = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -263,18 +263,18 @@ LL | let _val: Wrap<(RefPair, i32)> = mem::zeroed(); | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done | note: `RefPair` must be non-null (in this struct field) - --> $DIR/invalid_value.rs:17:18 + --> $DIR/invalid_value.rs:16:18 | LL | struct Wrap { wrapped: T } | ^^^^^^^^^^ note: because references must be non-null (in this struct field) - --> 
$DIR/invalid_value.rs:15:16 + --> $DIR/invalid_value.rs:14:16 | LL | struct RefPair((&'static i32, i32)); | ^^^^^^^^^^^^^^^^^^^ error: the type `Wrap<(RefPair, i32)>` does not permit being left uninitialized - --> $DIR/invalid_value.rs:90:42 + --> $DIR/invalid_value.rs:89:42 | LL | let _val: Wrap<(RefPair, i32)> = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -283,18 +283,18 @@ LL | let _val: Wrap<(RefPair, i32)> = mem::uninitialized(); | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done | note: `RefPair` must be non-null (in this struct field) - --> $DIR/invalid_value.rs:17:18 + --> $DIR/invalid_value.rs:16:18 | LL | struct Wrap { wrapped: T } | ^^^^^^^^^^ note: because references must be non-null (in this struct field) - --> $DIR/invalid_value.rs:15:16 + --> $DIR/invalid_value.rs:14:16 | LL | struct RefPair((&'static i32, i32)); | ^^^^^^^^^^^^^^^^^^^ error: the type `NonNull` does not permit zero-initialization - --> $DIR/invalid_value.rs:92:34 + --> $DIR/invalid_value.rs:91:34 | LL | let _val: NonNull = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -305,7 +305,7 @@ LL | let _val: NonNull = mem::zeroed(); = note: `std::ptr::NonNull` must be non-null error: the type `NonNull` does not permit being left uninitialized - --> $DIR/invalid_value.rs:93:34 + --> $DIR/invalid_value.rs:92:34 | LL | let _val: NonNull = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -317,30 +317,30 @@ LL | let _val: NonNull = mem::uninitialized(); = note: raw pointers must be initialized error: the type `(NonZero, i32)` does not permit zero-initialization - --> $DIR/invalid_value.rs:95:39 + --> $DIR/invalid_value.rs:94:41 | -LL | let _val: (NonZeroU32, i32) = mem::zeroed(); - | ^^^^^^^^^^^^^ - | | - | this code causes undefined behavior when executed - | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done +LL | let _val: (NonZero, i32) = mem::zeroed(); + | ^^^^^^^^^^^^^ + | | + | this code causes undefined behavior when executed + | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done | = note: `std::num::NonZero` must be non-null error: the type `(NonZero, i32)` does not permit being left uninitialized - --> $DIR/invalid_value.rs:96:39 + --> $DIR/invalid_value.rs:95:41 | -LL | let _val: (NonZeroU32, i32) = mem::uninitialized(); - | ^^^^^^^^^^^^^^^^^^^^ - | | - | this code causes undefined behavior when executed - | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done +LL | let _val: (NonZero, i32) = mem::uninitialized(); + | ^^^^^^^^^^^^^^^^^^^^ + | | + | this code causes undefined behavior when executed + | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done | = note: `std::num::NonZero` must be non-null = note: integers must be initialized error: the type `*const dyn Send` does not permit zero-initialization - --> $DIR/invalid_value.rs:98:37 + --> $DIR/invalid_value.rs:97:37 | LL | let _val: *const dyn Send = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -351,7 +351,7 @@ LL | let _val: *const dyn Send = mem::zeroed(); = note: the vtable of a wide raw pointer must be non-null error: the type `*const dyn Send` does not permit being left uninitialized - --> $DIR/invalid_value.rs:99:37 + --> $DIR/invalid_value.rs:98:37 | LL | let _val: *const dyn Send = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -362,7 +362,7 @@ LL | let _val: *const dyn Send = mem::uninitialized(); = note: the vtable of a wide raw pointer must be non-null error: the type `[fn(); 
2]` does not permit zero-initialization - --> $DIR/invalid_value.rs:101:31 + --> $DIR/invalid_value.rs:100:31 | LL | let _val: [fn(); 2] = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -373,7 +373,7 @@ LL | let _val: [fn(); 2] = mem::zeroed(); = note: function pointers must be non-null error: the type `[fn(); 2]` does not permit being left uninitialized - --> $DIR/invalid_value.rs:102:31 + --> $DIR/invalid_value.rs:101:31 | LL | let _val: [fn(); 2] = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -384,31 +384,31 @@ LL | let _val: [fn(); 2] = mem::uninitialized(); = note: function pointers must be non-null error: the type `TwoUninhabited` does not permit zero-initialization - --> $DIR/invalid_value.rs:104:36 + --> $DIR/invalid_value.rs:103:36 | LL | let _val: TwoUninhabited = mem::zeroed(); | ^^^^^^^^^^^^^ this code causes undefined behavior when executed | note: enums with no inhabited variants have no valid value - --> $DIR/invalid_value.rs:42:1 + --> $DIR/invalid_value.rs:41:1 | LL | enum TwoUninhabited { | ^^^^^^^^^^^^^^^^^^^ error: the type `TwoUninhabited` does not permit being left uninitialized - --> $DIR/invalid_value.rs:105:36 + --> $DIR/invalid_value.rs:104:36 | LL | let _val: TwoUninhabited = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ this code causes undefined behavior when executed | note: enums with no inhabited variants have no valid value - --> $DIR/invalid_value.rs:42:1 + --> $DIR/invalid_value.rs:41:1 | LL | enum TwoUninhabited { | ^^^^^^^^^^^^^^^^^^^ error: the type `OneFruitNonZero` does not permit zero-initialization - --> $DIR/invalid_value.rs:107:37 + --> $DIR/invalid_value.rs:106:37 | LL | let _val: OneFruitNonZero = mem::zeroed(); | ^^^^^^^^^^^^^ @@ -418,13 +418,13 @@ LL | let _val: OneFruitNonZero = mem::zeroed(); | = note: `OneFruitNonZero` must be non-null note: because `std::num::NonZero` must be non-null (in this field of the only potentially inhabited enum variant) - --> $DIR/invalid_value.rs:39:12 + --> $DIR/invalid_value.rs:38:12 | -LL | Banana(NonZeroU32), - | ^^^^^^^^^^ +LL | Banana(NonZero), + | ^^^^^^^^^^^^ error: the type `OneFruitNonZero` does not permit being left uninitialized - --> $DIR/invalid_value.rs:108:37 + --> $DIR/invalid_value.rs:107:37 | LL | let _val: OneFruitNonZero = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -434,14 +434,14 @@ LL | let _val: OneFruitNonZero = mem::uninitialized(); | = note: `OneFruitNonZero` must be non-null note: because `std::num::NonZero` must be non-null (in this field of the only potentially inhabited enum variant) - --> $DIR/invalid_value.rs:39:12 + --> $DIR/invalid_value.rs:38:12 | -LL | Banana(NonZeroU32), - | ^^^^^^^^^^ +LL | Banana(NonZero), + | ^^^^^^^^^^^^ = note: integers must be initialized error: the type `bool` does not permit being left uninitialized - --> $DIR/invalid_value.rs:112:26 + --> $DIR/invalid_value.rs:111:26 | LL | let _val: bool = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -452,7 +452,7 @@ LL | let _val: bool = mem::uninitialized(); = note: booleans must be either `true` or `false` error: the type `Wrap` does not permit being left uninitialized - --> $DIR/invalid_value.rs:115:32 + --> $DIR/invalid_value.rs:114:32 | LL | let _val: Wrap = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -462,13 +462,13 @@ LL | let _val: Wrap = mem::uninitialized(); | = note: `Wrap` must be initialized inside its custom valid range note: characters must be a valid Unicode codepoint (in this struct field) - --> $DIR/invalid_value.rs:17:18 + --> $DIR/invalid_value.rs:16:18 | LL | struct Wrap { wrapped: T } | 
^^^^^^^^^^ error: the type `NonBig` does not permit being left uninitialized - --> $DIR/invalid_value.rs:118:28 + --> $DIR/invalid_value.rs:117:28 | LL | let _val: NonBig = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -478,13 +478,13 @@ LL | let _val: NonBig = mem::uninitialized(); | = note: `NonBig` must be initialized inside its custom valid range note: integers must be initialized (in this struct field) - --> $DIR/invalid_value.rs:23:26 + --> $DIR/invalid_value.rs:22:26 | LL | pub(crate) struct NonBig(u64); | ^^^ error: the type `Fruit` does not permit being left uninitialized - --> $DIR/invalid_value.rs:121:27 + --> $DIR/invalid_value.rs:120:27 | LL | let _val: Fruit = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -493,13 +493,13 @@ LL | let _val: Fruit = mem::uninitialized(); | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done | note: enums with multiple inhabited variants have to be initialized to a variant - --> $DIR/invalid_value.rs:26:1 + --> $DIR/invalid_value.rs:25:1 | LL | enum Fruit { | ^^^^^^^^^^ error: the type `[bool; 2]` does not permit being left uninitialized - --> $DIR/invalid_value.rs:124:31 + --> $DIR/invalid_value.rs:123:31 | LL | let _val: [bool; 2] = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -510,7 +510,7 @@ LL | let _val: [bool; 2] = mem::uninitialized(); = note: booleans must be either `true` or `false` error: the type `i32` does not permit being left uninitialized - --> $DIR/invalid_value.rs:127:25 + --> $DIR/invalid_value.rs:126:25 | LL | let _val: i32 = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -521,7 +521,7 @@ LL | let _val: i32 = mem::uninitialized(); = note: integers must be initialized error: the type `f32` does not permit being left uninitialized - --> $DIR/invalid_value.rs:130:25 + --> $DIR/invalid_value.rs:129:25 | LL | let _val: f32 = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -532,7 +532,7 @@ LL | let _val: f32 = mem::uninitialized(); = note: floats must be initialized error: the type `*const ()` does not permit being left uninitialized - --> $DIR/invalid_value.rs:133:31 + --> $DIR/invalid_value.rs:132:31 | LL | let _val: *const () = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -543,7 +543,7 @@ LL | let _val: *const () = mem::uninitialized(); = note: raw pointers must be initialized error: the type `*const [()]` does not permit being left uninitialized - --> $DIR/invalid_value.rs:136:33 + --> $DIR/invalid_value.rs:135:33 | LL | let _val: *const [()] = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -554,7 +554,7 @@ LL | let _val: *const [()] = mem::uninitialized(); = note: raw pointers must be initialized error: the type `WrapAroundRange` does not permit being left uninitialized - --> $DIR/invalid_value.rs:139:37 + --> $DIR/invalid_value.rs:138:37 | LL | let _val: WrapAroundRange = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -564,13 +564,13 @@ LL | let _val: WrapAroundRange = mem::uninitialized(); | = note: `WrapAroundRange` must be initialized inside its custom valid range note: integers must be initialized (in this struct field) - --> $DIR/invalid_value.rs:49:35 + --> $DIR/invalid_value.rs:48:35 | LL | pub(crate) struct WrapAroundRange(u8); | ^^ error: the type `Result` does not permit being left uninitialized - --> $DIR/invalid_value.rs:144:38 + --> $DIR/invalid_value.rs:143:38 | LL | let _val: Result = mem::uninitialized(); | ^^^^^^^^^^^^^^^^^^^^ @@ -582,7 +582,7 @@ note: enums with multiple inhabited variants have to be initialized to a variant --> 
$SRC_DIR/core/src/result.rs:LL:COL error: the type `&i32` does not permit zero-initialization - --> $DIR/invalid_value.rs:152:34 + --> $DIR/invalid_value.rs:151:34 | LL | let _val: &'static i32 = mem::transmute(0usize); | ^^^^^^^^^^^^^^^^^^^^^^ @@ -593,7 +593,7 @@ LL | let _val: &'static i32 = mem::transmute(0usize); = note: references must be non-null error: the type `&[i32]` does not permit zero-initialization - --> $DIR/invalid_value.rs:153:36 + --> $DIR/invalid_value.rs:152:36 | LL | let _val: &'static [i32] = mem::transmute((0usize, 0usize)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -604,18 +604,18 @@ LL | let _val: &'static [i32] = mem::transmute((0usize, 0usize)); = note: references must be non-null error: the type `NonZero` does not permit zero-initialization - --> $DIR/invalid_value.rs:154:32 + --> $DIR/invalid_value.rs:153:34 | -LL | let _val: NonZeroU32 = mem::transmute(0); - | ^^^^^^^^^^^^^^^^^ - | | - | this code causes undefined behavior when executed - | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done +LL | let _val: NonZero = mem::transmute(0); + | ^^^^^^^^^^^^^^^^^ + | | + | this code causes undefined behavior when executed + | help: use `MaybeUninit` instead, and only call `assume_init` after initialization is done | = note: `std::num::NonZero` must be non-null error: the type `NonNull` does not permit zero-initialization - --> $DIR/invalid_value.rs:157:34 + --> $DIR/invalid_value.rs:156:34 | LL | let _val: NonNull = MaybeUninit::zeroed().assume_init(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -626,7 +626,7 @@ LL | let _val: NonNull = MaybeUninit::zeroed().assume_init(); = note: `std::ptr::NonNull` must be non-null error: the type `NonNull` does not permit being left uninitialized - --> $DIR/invalid_value.rs:158:34 + --> $DIR/invalid_value.rs:157:34 | LL | let _val: NonNull = MaybeUninit::uninit().assume_init(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -638,7 +638,7 @@ LL | let _val: NonNull = MaybeUninit::uninit().assume_init(); = note: raw pointers must be initialized error: the type `bool` does not permit being left uninitialized - --> $DIR/invalid_value.rs:159:26 + --> $DIR/invalid_value.rs:158:26 | LL | let _val: bool = MaybeUninit::uninit().assume_init(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/lint/lint-ctypes-enum.rs b/tests/ui/lint/lint-ctypes-enum.rs index 7c206080593..3157b6e240a 100644 --- a/tests/ui/lint/lint-ctypes-enum.rs +++ b/tests/ui/lint/lint-ctypes-enum.rs @@ -1,7 +1,8 @@ -#![feature(transparent_unions)] -#![feature(ptr_internals)] -#![deny(improper_ctypes)] #![allow(dead_code)] +#![deny(improper_ctypes)] +#![feature(generic_nonzero)] +#![feature(ptr_internals)] +#![feature(transparent_unions)] use std::num; @@ -67,26 +68,26 @@ extern "C" { fn option_fn(x: Option); fn nonnull(x: Option>); fn unique(x: Option>); - fn nonzero_u8(x: Option); - fn nonzero_u16(x: Option); - fn nonzero_u32(x: Option); - fn nonzero_u64(x: Option); - fn nonzero_u128(x: Option); + fn nonzero_u8(x: Option>); + fn nonzero_u16(x: Option>); + fn nonzero_u32(x: Option>); + fn nonzero_u64(x: Option>); + fn nonzero_u128(x: Option>); //~^ ERROR `extern` block uses type `u128` - fn nonzero_usize(x: Option); - fn nonzero_i8(x: Option); - fn nonzero_i16(x: Option); - fn nonzero_i32(x: Option); - fn nonzero_i64(x: Option); - fn nonzero_i128(x: Option); + fn nonzero_usize(x: Option>); + fn nonzero_i8(x: Option>); + fn nonzero_i16(x: Option>); + fn nonzero_i32(x: Option>); + fn nonzero_i64(x: Option>); + fn nonzero_i128(x: Option>); 
//~^ ERROR `extern` block uses type `i128` - fn nonzero_isize(x: Option); - fn transparent_struct(x: Option>); - fn transparent_enum(x: Option>); - fn transparent_union(x: Option>); + fn nonzero_isize(x: Option>); + fn transparent_struct(x: Option>>); + fn transparent_enum(x: Option>>); + fn transparent_union(x: Option>>); //~^ ERROR `extern` block uses type - fn repr_rust(x: Option>); //~ ERROR `extern` block uses type - fn no_result(x: Result<(), num::NonZeroI32>); //~ ERROR `extern` block uses type + fn repr_rust(x: Option>>); //~ ERROR `extern` block uses type + fn no_result(x: Result<(), num::NonZero>); //~ ERROR `extern` block uses type } pub fn main() {} diff --git a/tests/ui/lint/lint-ctypes-enum.stderr b/tests/ui/lint/lint-ctypes-enum.stderr index 64beefbb757..48be3eb5a56 100644 --- a/tests/ui/lint/lint-ctypes-enum.stderr +++ b/tests/ui/lint/lint-ctypes-enum.stderr @@ -1,5 +1,5 @@ error: `extern` block uses type `U`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:60:13 + --> $DIR/lint-ctypes-enum.rs:61:13 | LL | fn uf(x: U); | ^ not FFI-safe @@ -7,18 +7,18 @@ LL | fn uf(x: U); = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint note: the type is defined here - --> $DIR/lint-ctypes-enum.rs:9:1 + --> $DIR/lint-ctypes-enum.rs:10:1 | LL | enum U { | ^^^^^^ note: the lint level is defined here - --> $DIR/lint-ctypes-enum.rs:3:9 + --> $DIR/lint-ctypes-enum.rs:2:9 | LL | #![deny(improper_ctypes)] | ^^^^^^^^^^^^^^^ error: `extern` block uses type `B`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:61:13 + --> $DIR/lint-ctypes-enum.rs:62:13 | LL | fn bf(x: B); | ^ not FFI-safe @@ -26,13 +26,13 @@ LL | fn bf(x: B); = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint note: the type is defined here - --> $DIR/lint-ctypes-enum.rs:12:1 + --> $DIR/lint-ctypes-enum.rs:13:1 | LL | enum B { | ^^^^^^ error: `extern` block uses type `T`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:62:13 + --> $DIR/lint-ctypes-enum.rs:63:13 | LL | fn tf(x: T); | ^ not FFI-safe @@ -40,50 +40,50 @@ LL | fn tf(x: T); = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint note: the type is defined here - --> $DIR/lint-ctypes-enum.rs:16:1 + --> $DIR/lint-ctypes-enum.rs:17:1 | LL | enum T { | ^^^^^^ error: `extern` block uses type `u128`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:74:23 + --> $DIR/lint-ctypes-enum.rs:75:23 | -LL | fn nonzero_u128(x: Option); - | ^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe +LL | fn nonzero_u128(x: Option>); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe | = note: 128-bit integers don't currently have a known stable ABI error: `extern` block uses type `i128`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:81:23 + --> $DIR/lint-ctypes-enum.rs:82:23 | -LL | fn nonzero_i128(x: Option); - | ^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe +LL | fn nonzero_i128(x: Option>); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe | = note: 128-bit integers don't currently have a known stable ABI error: `extern` block uses type `Option>>`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:86:28 + --> $DIR/lint-ctypes-enum.rs:87:28 | -LL | fn transparent_union(x: Option>); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe +LL | fn transparent_union(x: Option>>); + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe | = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint error: `extern` block uses type `Option>>`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:88:20 + --> $DIR/lint-ctypes-enum.rs:89:20 | -LL | fn repr_rust(x: Option>); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe +LL | fn repr_rust(x: Option>>); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe | = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint error: `extern` block uses type `Result<(), NonZero>`, which is not FFI-safe - --> $DIR/lint-ctypes-enum.rs:89:20 + --> $DIR/lint-ctypes-enum.rs:90:20 | -LL | fn no_result(x: Result<(), num::NonZeroI32>); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe +LL | fn no_result(x: Result<(), num::NonZero>); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe | = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum = note: enum has no representation hint diff --git a/tests/ui/numbers-arithmetic/overflowing-neg-nonzero.rs b/tests/ui/numbers-arithmetic/overflowing-neg-nonzero.rs index dc3b6c280f3..bda5cef979e 100644 --- a/tests/ui/numbers-arithmetic/overflowing-neg-nonzero.rs +++ b/tests/ui/numbers-arithmetic/overflowing-neg-nonzero.rs @@ -2,11 +2,11 @@ //@ error-pattern:attempt to negate with overflow //@ ignore-emscripten no processes //@ compile-flags: -C debug-assertions - #![allow(arithmetic_overflow)] +#![feature(generic_nonzero)] -use std::num::NonZeroI8; +use std::num::NonZero; fn main() { - let _x = -NonZeroI8::new(i8::MIN).unwrap(); + let _x = -NonZero::new(i8::MIN).unwrap(); } diff --git a/tests/ui/print_type_sizes/niche-filling.rs b/tests/ui/print_type_sizes/niche-filling.rs index da11f0c0121..07da1cff27a 100644 --- a/tests/ui/print_type_sizes/niche-filling.rs +++ b/tests/ui/print_type_sizes/niche-filling.rs @@ -1,24 +1,24 @@ +//! This file illustrates how niche-filling enums are handled, +//! modelled after cases like `Option<&u32>`, `Option` and such. +//! +//! It uses `NonZero` rather than `&_` or `Unique<_>`, because +//! the test is not set up to deal with target-dependent pointer width. +//! +//! It avoids using `u64`/`i64` because on some targets that is only 4-byte +//! aligned (while on most it is 8-byte aligned) and so the resulting +//! padding and overall computed sizes can be quite different. +//! //@ compile-flags: -Z print-type-sizes --crate-type lib //@ ignore-debug: debug assertions will print more types //@ build-pass //@ ignore-pass -// ^-- needed because `--pass check` does not emit the output needed. -// FIXME: consider using an attribute instead of side-effects. - -// This file illustrates how niche-filling enums are handled, -// modelled after cases like `Option<&u32>`, `Option` and such. -// -// It uses NonZeroU32 rather than `&_` or `Unique<_>`, because -// the test is not set up to deal with target-dependent pointer width. -// -// It avoids using u64/i64 because on some targets that is only 4-byte -// aligned (while on most it is 8-byte aligned) and so the resulting -// padding and overall computed sizes can be quite different. - -#![feature(rustc_attrs)] +// ^-- needed because `--pass check` does not emit the output needed. +// FIXME: consider using an attribute instead of side-effects. 
#![allow(dead_code)] +#![feature(generic_nonzero)] +#![feature(rustc_attrs)] -use std::num::NonZeroU32; +use std::num::NonZero; pub enum MyOption { None, Some(T) } @@ -34,7 +34,7 @@ impl Default for MyOption { pub enum EmbeddedDiscr { None, - Record { pre: u8, val: NonZeroU32, post: u16 }, + Record { pre: u8, val: NonZero, post: u16 }, } impl Default for EmbeddedDiscr { @@ -50,13 +50,13 @@ pub struct IndirectNonZero { pub struct NestedNonZero { pre: u8, - val: NonZeroU32, + val: NonZero, post: u16, } impl Default for NestedNonZero { fn default() -> Self { - NestedNonZero { pre: 0, val: unsafe { NonZeroU32::new_unchecked(1) }, post: 0 } + NestedNonZero { pre: 0, val: unsafe { NonZero::new_unchecked(1) }, post: 0 } } } @@ -77,7 +77,7 @@ pub union Union2 { } pub fn test() { - let _x: MyOption = Default::default(); + let _x: MyOption> = Default::default(); let _y: EmbeddedDiscr = Default::default(); let _z: MyOption = Default::default(); let _a: MyOption = Default::default(); @@ -90,9 +90,9 @@ pub fn test() { let _h: MyOption = Default::default(); // Unions do not currently participate in niche filling. - let _i: MyOption> = Default::default(); + let _i: MyOption, u32>> = Default::default(); // ...even when theoretically possible. - let _j: MyOption> = Default::default(); - let _k: MyOption> = Default::default(); + let _j: MyOption>> = Default::default(); + let _k: MyOption, NonZero>> = Default::default(); } diff --git a/tests/ui/structs-enums/enum-null-pointer-opt.rs b/tests/ui/structs-enums/enum-null-pointer-opt.rs index 6f8c8168968..a8418943ba4 100644 --- a/tests/ui/structs-enums/enum-null-pointer-opt.rs +++ b/tests/ui/structs-enums/enum-null-pointer-opt.rs @@ -1,8 +1,9 @@ //@ run-pass +#![feature(generic_nonzero)] #![feature(transparent_unions)] use std::mem::size_of; -use std::num::NonZeroUsize; +use std::num::NonZero; use std::ptr::NonNull; use std::rc::Rc; use std::sync::Arc; @@ -57,7 +58,7 @@ fn main() { assert_eq!(size_of::<[Box; 1]>(), size_of::; 1]>>()); // Should apply to NonZero - assert_eq!(size_of::(), size_of::>()); + assert_eq!(size_of::>(), size_of::>>()); assert_eq!(size_of::>(), size_of::>>()); // Should apply to types that use NonZero internally diff --git a/tests/ui/structs-enums/enum-null-pointer-opt.stderr b/tests/ui/structs-enums/enum-null-pointer-opt.stderr index 64e93ffaffd..fca62bd1c80 100644 --- a/tests/ui/structs-enums/enum-null-pointer-opt.stderr +++ b/tests/ui/structs-enums/enum-null-pointer-opt.stderr @@ -1,5 +1,5 @@ warning: method `dummy` is never used - --> $DIR/enum-null-pointer-opt.rs:10:18 + --> $DIR/enum-null-pointer-opt.rs:11:18 | LL | trait Trait { fn dummy(&self) { } } | ----- ^^^^^ diff --git a/tests/ui/structs-enums/type-sizes.rs b/tests/ui/structs-enums/type-sizes.rs index 92060e3cade..66f663ce077 100644 --- a/tests/ui/structs-enums/type-sizes.rs +++ b/tests/ui/structs-enums/type-sizes.rs @@ -2,12 +2,13 @@ #![allow(non_camel_case_types)] #![allow(dead_code)] +#![feature(generic_nonzero)] #![feature(never_type)] #![feature(pointer_is_aligned)] #![feature(strict_provenance)] use std::mem::size_of; -use std::num::{NonZeroU8, NonZeroU16}; +use std::num::NonZero; use std::ptr; use std::ptr::NonNull; use std::borrow::Cow; @@ -110,14 +111,14 @@ enum Option2 { // Two layouts are considered for `CanBeNicheFilledButShouldnt`: // Niche-filling: -// { u32 (4 bytes), NonZeroU8 + tag in niche (1 byte), padding (3 bytes) } +// { u32 (4 bytes), NonZero + tag in niche (1 byte), padding (3 bytes) } // Tagged: -// { tag (1 byte), NonZeroU8 (1 byte), padding (2 
bytes), u32 (4 bytes) } +// { tag (1 byte), NonZero (1 byte), padding (2 bytes), u32 (4 bytes) } // Both are the same size (due to padding), // but the tagged layout is better as the tag creates a niche with 254 invalid values, // allowing types like `Option>` to fit into 8 bytes. pub enum CanBeNicheFilledButShouldnt { - A(NonZeroU8, u32), + A(NonZero, u32), B } pub enum AlwaysTaggedBecauseItHasNoNiche { @@ -135,7 +136,7 @@ pub enum NicheFilledMultipleFields { G, } -struct BoolInTheMiddle(std::num::NonZeroU16, bool, u8); +struct BoolInTheMiddle(NonZero, bool, u8); enum NicheWithData { A, @@ -275,7 +276,7 @@ pub fn main() { assert_eq!(size_of::>(), 2); assert_eq!(size_of::>>(), 2); - struct S1{ a: u16, b: std::num::NonZeroU16, c: u16, d: u8, e: u32, f: u64, g:[u8;2] } + struct S1{ a: u16, b: NonZero, c: u16, d: u8, e: u32, f: u64, g:[u8;2] } assert_eq!(size_of::(), 24); assert_eq!(size_of::>(), 24); @@ -287,14 +288,14 @@ pub fn main() { size_of::<(&(), NicheWithData)>() ); - pub enum FillPadding { A(std::num::NonZeroU8, u32), B } + pub enum FillPadding { A(NonZero, u32), B } assert_eq!(size_of::(), 8); assert_eq!(size_of::>(), 8); assert_eq!(size_of::>>(), 8); - assert_eq!(size_of::>(), 4); - assert_eq!(size_of::>>(), 4); - assert_eq!(size_of::>(), 4); + assert_eq!(size_of::, u8, u8), u16>>(), 4); + assert_eq!(size_of::, u8, u8), u16>>>(), 4); + assert_eq!(size_of::, u8, u8, u8), u16>>(), 4); assert_eq!(size_of::>(), 6); assert_eq!(size_of::>(), 4); @@ -314,10 +315,10 @@ pub fn main() { assert_eq!(ptr::from_ref(&v), ptr::from_ref(&v.r.ptr).cast(), "sort niches to the front where possible"); - // Ideal layouts: (bool, u8, NonZeroU16) or (NonZeroU16, u8, bool) + // Ideal layouts: (bool, u8, NonZero) or (NonZero, u8, bool) // Currently the layout algorithm will choose the latter because it doesn't attempt // to aggregate multiple smaller fields to move a niche before a higher-alignment one. - let b = BoolInTheMiddle( NonZeroU16::new(1).unwrap(), true, 0); + let b = BoolInTheMiddle(NonZero::new(1).unwrap(), true, 0); assert!(ptr::from_ref(&b.1).addr() > ptr::from_ref(&b.2).addr()); assert_eq!(size_of::>(), size_of::()); diff --git a/tests/ui/suggestions/core-std-import-order-issue-83564.rs b/tests/ui/suggestions/core-std-import-order-issue-83564.rs index 2cf1983858a..62b9b246cc8 100644 --- a/tests/ui/suggestions/core-std-import-order-issue-83564.rs +++ b/tests/ui/suggestions/core-std-import-order-issue-83564.rs @@ -1,10 +1,11 @@ //@ edition:2018 - +// // This is a regression test for #83564. // For some reason, Rust 2018 or higher is required to reproduce the bug. 
+#![feature(generic_nonzero)] fn main() { //~^ HELP consider importing one of these items - let _x = NonZeroU32::new(5).unwrap(); - //~^ ERROR failed to resolve: use of undeclared type `NonZeroU32` + let _x = NonZero::new(5u32).unwrap(); + //~^ ERROR failed to resolve: use of undeclared type `NonZero` } diff --git a/tests/ui/suggestions/core-std-import-order-issue-83564.stderr b/tests/ui/suggestions/core-std-import-order-issue-83564.stderr index c2634e3070e..56e10b9340c 100644 --- a/tests/ui/suggestions/core-std-import-order-issue-83564.stderr +++ b/tests/ui/suggestions/core-std-import-order-issue-83564.stderr @@ -1,14 +1,14 @@ -error[E0433]: failed to resolve: use of undeclared type `NonZeroU32` - --> $DIR/core-std-import-order-issue-83564.rs:8:14 +error[E0433]: failed to resolve: use of undeclared type `NonZero` + --> $DIR/core-std-import-order-issue-83564.rs:9:14 | -LL | let _x = NonZeroU32::new(5).unwrap(); - | ^^^^^^^^^^ use of undeclared type `NonZeroU32` +LL | let _x = NonZero::new(5u32).unwrap(); + | ^^^^^^^ use of undeclared type `NonZero` | help: consider importing one of these items | -LL + use core::num::NonZeroU32; +LL + use core::num::NonZero; | -LL + use std::num::NonZeroU32; +LL + use std::num::NonZero; | error: aborting due to 1 previous error diff --git a/tests/ui/traits/next-solver/specialization-transmute.rs b/tests/ui/traits/next-solver/specialization-transmute.rs index 41c90322011..17c55fb4d49 100644 --- a/tests/ui/traits/next-solver/specialization-transmute.rs +++ b/tests/ui/traits/next-solver/specialization-transmute.rs @@ -1,6 +1,6 @@ //@ compile-flags: -Znext-solver //~^ ERROR cannot normalize `::Id: '_` - +#![feature(generic_nonzero)] #![feature(specialization)] //~^ WARN the feature `specialization` is incomplete @@ -23,8 +23,9 @@ fn transmute, U: Copy>(t: T) -> U { *t.intu() } -use std::num::NonZeroU8; +use std::num::NonZero; + fn main() { - let s = transmute::>(0); //~ ERROR cannot satisfy + let s = transmute::>>(0); //~ ERROR cannot satisfy assert_eq!(s, None); } diff --git a/tests/ui/traits/next-solver/specialization-transmute.stderr b/tests/ui/traits/next-solver/specialization-transmute.stderr index c87612d6a26..65e33700325 100644 --- a/tests/ui/traits/next-solver/specialization-transmute.stderr +++ b/tests/ui/traits/next-solver/specialization-transmute.stderr @@ -23,10 +23,10 @@ LL | self | ^^^^ cannot satisfy `T <: ::Id` error[E0284]: type annotations needed: cannot satisfy `::Id == Option>` - --> $DIR/specialization-transmute.rs:28:13 + --> $DIR/specialization-transmute.rs:29:13 | -LL | let s = transmute::>(0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot satisfy `::Id == Option>` +LL | let s = transmute::>>(0); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot satisfy `::Id == Option>` | note: required by a bound in `transmute` --> $DIR/specialization-transmute.rs:22:25 From fa7557181f371d88fb67f17b85827954f79cdf94 Mon Sep 17 00:00:00 2001 From: Gurinder Singh Date: Sun, 25 Feb 2024 17:51:56 +0530 Subject: [PATCH 88/92] Don't use `unwrap()` in `ArrayIntoIter` lint when typeck fails --- compiler/rustc_lint/src/array_into_iter.rs | 14 +++++++++----- .../lint/ice-array-into-iter-lint-issue-121532.rs | 11 +++++++++++ .../ice-array-into-iter-lint-issue-121532.stderr | 9 +++++++++ 3 files changed, 29 insertions(+), 5 deletions(-) create mode 100644 tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs create mode 100644 tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr diff --git a/compiler/rustc_lint/src/array_into_iter.rs 
b/compiler/rustc_lint/src/array_into_iter.rs index 3a5c585366a..993b1d739a1 100644 --- a/compiler/rustc_lint/src/array_into_iter.rs +++ b/compiler/rustc_lint/src/array_into_iter.rs @@ -70,11 +70,15 @@ impl<'tcx> LateLintPass<'tcx> for ArrayIntoIter { // Check if the method call actually calls the libcore // `IntoIterator::into_iter`. - let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap(); - match cx.tcx.trait_of_item(def_id) { - Some(trait_id) if cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id) => {} - _ => return, - }; + let trait_id = cx + .typeck_results() + .type_dependent_def_id(expr.hir_id) + .and_then(|did| cx.tcx.trait_of_item(did)); + if trait_id.is_none() + || !cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id.unwrap()) + { + return; + } // As this is a method call expression, we have at least one argument. let receiver_ty = cx.typeck_results().expr_ty(receiver_arg); diff --git a/tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs new file mode 100644 index 00000000000..92cab01fe48 --- /dev/null +++ b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs @@ -0,0 +1,11 @@ +// Regression test for #121532 +// Checks the we don't ICE in ArrayIntoIter +// lint when typeck has failed + + // Typeck fails for the arg type as + // `Self` makes no sense here +fn func(a: Self::ItemsIterator) { //~ ERROR failed to resolve: `Self` is only available in impls, traits, and type definitions + a.into_iter(); +} + +fn main() {} diff --git a/tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr new file mode 100644 index 00000000000..73ceddae940 --- /dev/null +++ b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr @@ -0,0 +1,9 @@ +error[E0433]: failed to resolve: `Self` is only available in impls, traits, and type definitions + --> $DIR/ice-array-into-iter-lint-issue-121532.rs:7:12 + | +LL | fn func(a: Self::ItemsIterator) { + | ^^^^ `Self` is only available in impls, traits, and type definitions + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0433`. From 08caefbb103d1809113172d16eaf8f66c2edc2f1 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Wed, 20 Dec 2023 11:57:19 +0000 Subject: [PATCH 89/92] Windows: Use ProcessPrng for random keys --- library/std/src/sys/pal/windows/c.rs | 15 +++++++ .../std/src/sys/pal/windows/c/bindings.txt | 4 -- .../std/src/sys/pal/windows/c/windows_sys.rs | 12 ------ library/std/src/sys/pal/windows/rand.rs | 41 ++++++------------- 4 files changed, 28 insertions(+), 44 deletions(-) diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs index 6b12d7db8b0..ad8e01bfa9b 100644 --- a/library/std/src/sys/pal/windows/c.rs +++ b/library/std/src/sys/pal/windows/c.rs @@ -321,6 +321,21 @@ pub unsafe fn NtWriteFile( } } +// Use raw-dylib to import ProcessPrng as we can't rely on there being an import library. +cfg_if::cfg_if! 
{ +if #[cfg(not(target_vendor = "win7"))] { + #[cfg(target_arch = "x86")] + #[link(name = "bcryptprimitives", kind = "raw-dylib", import_name_type = "undecorated")] + extern "system" { + pub fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> BOOL; + } + #[cfg(not(target_arch = "x86"))] + #[link(name = "bcryptprimitives", kind = "raw-dylib")] + extern "system" { + pub fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> BOOL; + } +}} + // Functions that aren't available on every version of Windows that we support, // but we still use them and just provide some form of a fallback implementation. compat_fn_with_fallback! { diff --git a/library/std/src/sys/pal/windows/c/bindings.txt b/library/std/src/sys/pal/windows/c/bindings.txt index 726f1c3df82..ab2a8caf5df 100644 --- a/library/std/src/sys/pal/windows/c/bindings.txt +++ b/library/std/src/sys/pal/windows/c/bindings.txt @@ -2180,10 +2180,6 @@ Windows.Win32.Networking.WinSock.WSATRY_AGAIN Windows.Win32.Networking.WinSock.WSATYPE_NOT_FOUND Windows.Win32.Networking.WinSock.WSAVERNOTSUPPORTED Windows.Win32.Security.Authentication.Identity.RtlGenRandom -Windows.Win32.Security.Cryptography.BCRYPT_ALG_HANDLE -Windows.Win32.Security.Cryptography.BCRYPT_USE_SYSTEM_PREFERRED_RNG -Windows.Win32.Security.Cryptography.BCryptGenRandom -Windows.Win32.Security.Cryptography.BCRYPTGENRANDOM_FLAGS Windows.Win32.Security.SECURITY_ATTRIBUTES Windows.Win32.Security.TOKEN_ACCESS_MASK Windows.Win32.Security.TOKEN_ACCESS_PSEUDO_HANDLE diff --git a/library/std/src/sys/pal/windows/c/windows_sys.rs b/library/std/src/sys/pal/windows/c/windows_sys.rs index c386b66a722..8eb779373f7 100644 --- a/library/std/src/sys/pal/windows/c/windows_sys.rs +++ b/library/std/src/sys/pal/windows/c/windows_sys.rs @@ -15,15 +15,6 @@ extern "system" { pub fn RtlGenRandom(randombuffer: *mut ::core::ffi::c_void, randombufferlength: u32) -> BOOLEAN; } -#[link(name = "bcrypt")] -extern "system" { - pub fn BCryptGenRandom( - halgorithm: BCRYPT_ALG_HANDLE, - pbbuffer: *mut u8, - cbbuffer: u32, - dwflags: BCRYPTGENRANDOM_FLAGS, - ) -> NTSTATUS; -} #[link(name = "kernel32")] extern "system" { pub fn AcquireSRWLockExclusive(srwlock: *mut SRWLOCK) -> (); @@ -889,9 +880,6 @@ impl ::core::clone::Clone for ARM64_NT_NEON128_0 { *self } } -pub type BCRYPTGENRANDOM_FLAGS = u32; -pub type BCRYPT_ALG_HANDLE = *mut ::core::ffi::c_void; -pub const BCRYPT_USE_SYSTEM_PREFERRED_RNG: BCRYPTGENRANDOM_FLAGS = 2u32; pub const BELOW_NORMAL_PRIORITY_CLASS: PROCESS_CREATION_FLAGS = 16384u32; pub type BOOL = i32; pub type BOOLEAN = u8; diff --git a/library/std/src/sys/pal/windows/rand.rs b/library/std/src/sys/pal/windows/rand.rs index bd1ae6b0607..e427546222a 100644 --- a/library/std/src/sys/pal/windows/rand.rs +++ b/library/std/src/sys/pal/windows/rand.rs @@ -1,42 +1,27 @@ -use crate::mem; -use crate::ptr; use crate::sys::c; +use core::mem; +use core::ptr; +#[cfg(not(target_vendor = "win7"))] +#[inline] pub fn hashmap_random_keys() -> (u64, u64) { let mut v = (0, 0); - let ret = unsafe { - c::BCryptGenRandom( - ptr::null_mut(), - core::ptr::addr_of_mut!(v) as *mut u8, - mem::size_of_val(&v) as c::ULONG, - c::BCRYPT_USE_SYSTEM_PREFERRED_RNG, - ) - }; - if c::nt_success(ret) { v } else { fallback_rng() } + let ret = unsafe { c::ProcessPrng(ptr::addr_of_mut!(v).cast::(), mem::size_of_val(&v)) }; + // ProcessPrng is documented as always returning `TRUE`. 
+ // https://learn.microsoft.com/en-us/windows/win32/seccng/processprng#return-value + debug_assert_eq!(ret, c::TRUE); + v } -/// Generate random numbers using the fallback RNG function (RtlGenRandom) -/// -/// This is necessary because of a failure to load the SysWOW64 variant of the -/// bcryptprimitives.dll library from code that lives in bcrypt.dll -/// See -#[cfg(not(target_vendor = "uwp"))] -#[inline(never)] -fn fallback_rng() -> (u64, u64) { +#[cfg(target_vendor = "win7")] +pub fn hashmap_random_keys() -> (u64, u64) { use crate::ffi::c_void; use crate::io; let mut v = (0, 0); let ret = unsafe { - c::RtlGenRandom(core::ptr::addr_of_mut!(v) as *mut c_void, mem::size_of_val(&v) as c::ULONG) + c::RtlGenRandom(ptr::addr_of_mut!(v).cast::(), mem::size_of_val(&v) as c::ULONG) }; - if ret != 0 { v } else { panic!("fallback RNG broken: {}", io::Error::last_os_error()) } -} - -/// We can't use RtlGenRandom with UWP, so there is no fallback -#[cfg(target_vendor = "uwp")] -#[inline(never)] -fn fallback_rng() -> (u64, u64) { - panic!("fallback RNG broken: RtlGenRandom() not supported on UWP"); + if ret != 0 { v } else { panic!("RNG broken: {}", io::Error::last_os_error()) } } From 8f89cbd796fe54b3454175193d6c2be245376161 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Tue, 20 Feb 2024 08:51:24 -0300 Subject: [PATCH 90/92] Add ProcessPrng shim to Miri This is essentially the same as SystemFunction036 (aka RtlGenRandom) except that the given length is a usize instead of a u32 --- src/tools/miri/src/shims/windows/foreign_items.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/tools/miri/src/shims/windows/foreign_items.rs b/src/tools/miri/src/shims/windows/foreign_items.rs index bddc30b8379..fdd7fc5fad4 100644 --- a/src/tools/miri/src/shims/windows/foreign_items.rs +++ b/src/tools/miri/src/shims/windows/foreign_items.rs @@ -427,6 +427,14 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> { this.gen_random(ptr, len.into())?; this.write_scalar(Scalar::from_bool(true), dest)?; } + "ProcessPrng" => { + let [ptr, len] = + this.check_shim(abi, Abi::System { unwind: false }, link_name, args)?; + let ptr = this.read_pointer(ptr)?; + let len = this.read_target_usize(len)?; + this.gen_random(ptr, len.into())?; + this.write_scalar(Scalar::from_i32(1), dest)?; + } "BCryptGenRandom" => { let [algorithm, ptr, len, flags] = this.check_shim(abi, Abi::System { unwind: false }, link_name, args)?; From 843eaf2e71f9a1ae564c5d24ec69c7a0c29e4e53 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Tue, 20 Feb 2024 16:09:49 -0300 Subject: [PATCH 91/92] Cranelift: Don't use raw-dylib in std --- .../0029-stdlib-rawdylib-processprng.patch | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch diff --git a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch new file mode 100644 index 00000000000..6af11e54d88 --- /dev/null +++ b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch @@ -0,0 +1,47 @@ +From 9f65e742ba3e41474e6126c6c4469c48eaa6ca7e Mon Sep 17 00:00:00 2001 +From: Chris Denton +Date: Tue, 20 Feb 2024 16:01:40 -0300 +Subject: [PATCH] Don't use raw-dylib in std + +--- + library/std/src/sys/pal/windows/c.rs | 2 +- + library/std/src/sys/pal/windows/rand.rs | 3 +-- + 2 files changed, 2 insertions(+), 3 deletions(-) + +diff --git 
a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs
+index ad8e01bfa9b..9ca8e4c16ce 100644
+--- a/library/std/src/sys/pal/windows/c.rs
++++ b/library/std/src/sys/pal/windows/c.rs
+@@ -323,7 +323,7 @@ pub unsafe fn NtWriteFile(
+ 
+ // Use raw-dylib to import ProcessPrng as we can't rely on there being an import library.
+ cfg_if::cfg_if! {
+-if #[cfg(not(target_vendor = "win7"))] {
++if #[cfg(any())] {
+     #[cfg(target_arch = "x86")]
+     #[link(name = "bcryptprimitives", kind = "raw-dylib", import_name_type = "undecorated")]
+     extern "system" {
+diff --git a/library/std/src/sys/pal/windows/rand.rs b/library/std/src/sys/pal/windows/rand.rs
+index e427546222a..f2fe42a4d51 100644
+--- a/library/std/src/sys/pal/windows/rand.rs
++++ b/library/std/src/sys/pal/windows/rand.rs
+@@ -2,7 +2,7 @@
+ use core::mem;
+ use core::ptr;
+ 
+-#[cfg(not(target_vendor = "win7"))]
++#[cfg(any())]
+ #[inline]
+ pub fn hashmap_random_keys() -> (u64, u64) {
+     let mut v = (0, 0);
+@@ -13,7 +13,6 @@ pub fn hashmap_random_keys() -> (u64, u64) {
+     v
+ }
+ 
+-#[cfg(target_vendor = "win7")]
+ pub fn hashmap_random_keys() -> (u64, u64) {
+     use crate::ffi::c_void;
+     use crate::io;
+-- 
+2.42.0.windows.2
+
From 633c92cd6de9e269b9c5b10f341fc10280503f0c Mon Sep 17 00:00:00 2001
From: Gurinder Singh
Date: Mon, 26 Feb 2024 15:22:22 +0530
Subject: [PATCH 92/92] Do not const prop unions as they can be made to
 produce values whose types don't match their underlying layout types, which
 can lead to ICEs on eval

---
 .../src/known_panics_lint.rs                  | 40 ++++++++++++-------
 ...e-unions-known-panics-lint-issue-121534.rs | 21 ++++++++++
 2 files changed, 47 insertions(+), 14 deletions(-)
 create mode 100644 tests/ui/lint/ice-unions-known-panics-lint-issue-121534.rs

diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs
index a9cd688c315..7cab6650994 100644
--- a/compiler/rustc_mir_transform/src/known_panics_lint.rs
+++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs
@@ -585,20 +585,32 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
                 val.into()
             }
 
-            Aggregate(ref kind, ref fields) => Value::Aggregate {
-                fields: fields
-                    .iter()
-                    .map(|field| self.eval_operand(field).map_or(Value::Uninit, Value::Immediate))
-                    .collect(),
-                variant: match **kind {
-                    AggregateKind::Adt(_, variant, _, _, _) => variant,
-                    AggregateKind::Array(_)
-                    | AggregateKind::Tuple
-                    | AggregateKind::Closure(_, _)
-                    | AggregateKind::Coroutine(_, _)
-                    | AggregateKind::CoroutineClosure(_, _) => VariantIdx::new(0),
-                },
-            },
+            Aggregate(ref kind, ref fields) => {
+                // Do not const prop union fields as they can be
+                // made to produce values that don't match their
+                // underlying layout's type (see ICE #121534).
+ // If the last element of the `Adt` tuple + // is `Some` it indicates the ADT is a union + if let AggregateKind::Adt(_, _, _, _, Some(_)) = **kind { + return None; + }; + Value::Aggregate { + fields: fields + .iter() + .map(|field| { + self.eval_operand(field).map_or(Value::Uninit, Value::Immediate) + }) + .collect(), + variant: match **kind { + AggregateKind::Adt(_, variant, _, _, _) => variant, + AggregateKind::Array(_) + | AggregateKind::Tuple + | AggregateKind::Closure(_, _) + | AggregateKind::Coroutine(_, _) + | AggregateKind::CoroutineClosure(_, _) => VariantIdx::new(0), + }, + } + } Repeat(ref op, n) => { trace!(?op, ?n); diff --git a/tests/ui/lint/ice-unions-known-panics-lint-issue-121534.rs b/tests/ui/lint/ice-unions-known-panics-lint-issue-121534.rs new file mode 100644 index 00000000000..9fadb828b3d --- /dev/null +++ b/tests/ui/lint/ice-unions-known-panics-lint-issue-121534.rs @@ -0,0 +1,21 @@ +// Regression test for #121534 +// Tests that no ICE occurs in KnownPanicsLint when it +// evaluates an operation whose operands have different +// layout types even though they have the same type. +// This situation can be contrived through the use of +// unions as in this test + +//@ build-pass +union Union { + u32_field: u32, + i32_field: i32, +} + +pub fn main() { + let u32_variant = Union { u32_field: 2 }; + let i32_variant = Union { i32_field: 3 }; + let a = unsafe { u32_variant.u32_field }; + let b = unsafe { i32_variant.u32_field }; + + let _diff = a - b; +}
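As a runtime companion to the build-pass regression test above (an editor-added sketch, not part of the patch series; the `Bits` union and the concrete values are illustrative only), the following self-contained program shows the kind of type punning involved: reading a union through a different field than the one written reinterprets the stored bits, so two operands can share the Rust type `u32` even though one of them originates from an `i32` write.

    // Reading a union field reinterprets the stored bits; every bit pattern of
    // `u32` and `i32` is valid for the other, so these reads are defined behavior.
    union Bits {
        u: u32,
        i: i32,
    }

    fn main() {
        let a = Bits { u: 2 };
        let b = Bits { i: -3 };

        // SAFETY: `u32` and `i32` occupy the same bytes and every bit pattern
        // of the written field is also a valid `u32`.
        let (a, b) = unsafe { (a.u, b.u) };

        // `-3i32` reinterpreted as `u32` is `u32::MAX - 2`.
        assert_eq!(b, u32::MAX - 2);

        // Wrapping arithmetic keeps this example from overflow-panicking at
        // runtime, unlike `a - b` in the build-pass test, which would overflow
        // if it were ever executed.
        assert_eq!(a.wrapping_sub(b), 5);
    }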
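Further back in this series, several tests are migrated from the concrete `NonZeroU8`/`NonZeroU16`/`NonZeroU32` aliases to the generic `NonZero<T>` form. The niche-filling property those type-size assertions rely on can be checked with a small standalone program; this is likewise an editor-added sketch (generic `NonZero<T>` is stable since Rust 1.79, whereas the patches above still gate it behind `#![feature(generic_nonzero)]`).

    use std::mem::size_of;
    use std::num::NonZero;

    fn main() {
        // `NonZero<u16>` bans the value 0, so `Option` can use that invalid
        // value as a niche for `None` and needs no separate tag byte.
        assert_eq!(size_of::<Option<NonZero<u16>>>(), size_of::<u16>());

        // Plain `u16` has no invalid values, so the `None` case needs its own
        // tag, padded up to the alignment of `u16`.
        assert!(size_of::<Option<u16>>() > size_of::<u16>());
    }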