
Merge pull request #18967 from Veykril/push-pwonkmwqmmol

Properly record meaningful imports as re-exports in symbol index
Lukas Wirth 2025-01-20 13:46:52 +00:00 committed by GitHub
commit 1eb9d15e42
25 changed files with 382 additions and 279 deletions

View file

@@ -523,6 +523,7 @@ dependencies = [
  "hir-def",
  "hir-expand",
  "hir-ty",
+ "indexmap",
  "intern",
  "itertools",
  "rustc-hash 2.0.0",

View file

@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
 resolver = "2"

 [workspace.package]
-rust-version = "1.82"
+rust-version = "1.83"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]

View file

@@ -244,7 +244,7 @@ bitflags::bitflags! {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct TraitData {
     pub name: Name,
-    pub items: Vec<(Name, AssocItemId)>,
+    pub items: Box<[(Name, AssocItemId)]>,
     pub flags: TraitFlags,
     pub visibility: RawVisibility,
     // box it as the vec is usually empty anyways
@@ -360,7 +360,7 @@ impl TraitAliasData {
 pub struct ImplData {
     pub target_trait: Option<TraitRef>,
     pub self_ty: TypeRefId,
-    pub items: Box<[AssocItemId]>,
+    pub items: Box<[(Name, AssocItemId)]>,
     pub is_negative: bool,
     pub is_unsafe: bool,
     // box it as the vec is usually empty anyways
@@ -393,7 +393,6 @@ impl ImplData {
         collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);

         let (items, macro_calls, diagnostics) = collector.finish();
-        let items = items.into_iter().map(|(_, item)| item).collect();

         (
             Arc::new(ImplData {
@@ -648,12 +647,12 @@ impl<'a> AssocItemCollector<'a> {
     fn finish(
         self,
     ) -> (
-        Vec<(Name, AssocItemId)>,
+        Box<[(Name, AssocItemId)]>,
         Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
         Vec<DefDiagnostic>,
    ) {
        (
-            self.items,
+            self.items.into_boxed_slice(),
            if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) },
            self.diagnostics,
        )
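Storing the associated item's Name next to its id means that later name lookups can scan the slice directly instead of querying function/const data for every candidate. A rough, self-contained sketch of the pattern this enables (simplified stand-in types, not the crate's real ImplData):

// Simplified stand-in for the new `items: Box<[(Name, AssocItemId)]>` layout.
struct ImplData {
    items: Box<[(String, u32)]>, // (name, associated item id)
}

impl ImplData {
    // Name-based lookup is now a plain slice scan; no per-item database query is required.
    fn item_by_name(&self, name: &str) -> Option<u32> {
        self.items.iter().find_map(|(n, id)| (n == name).then_some(*id))
    }
}

fn main() {
    let data = ImplData {
        items: vec![("new".to_owned(), 0), ("len".to_owned(), 1)].into_boxed_slice(),
    };
    assert_eq!(data.item_by_name("len"), Some(1));
}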

View file

@@ -162,6 +162,20 @@ impl ItemScope {
             .map(move |name| (name, self.get(name)))
     }

+    pub fn values(&self) -> impl Iterator<Item = (&Name, Item<ModuleDefId, ImportId>)> + '_ {
+        self.values.iter().map(|(n, &i)| (n, i))
+    }
+
+    pub fn types(
+        &self,
+    ) -> impl Iterator<Item = (&Name, Item<ModuleDefId, ImportOrExternCrate>)> + '_ {
+        self.types.iter().map(|(n, &i)| (n, i))
+    }
+
+    pub fn macros(&self) -> impl Iterator<Item = (&Name, Item<MacroId, ImportId>)> + '_ {
+        self.macros.iter().map(|(n, &i)| (n, i))
+    }
+
     pub fn imports(&self) -> impl Iterator<Item = ImportId> + '_ {
         self.use_imports_types
             .keys()
@@ -263,11 +277,6 @@ impl ItemScope {
         self.unnamed_consts.iter().copied()
     }

-    /// Iterate over all module scoped macros
-    pub(crate) fn macros(&self) -> impl Iterator<Item = (&Name, MacroId)> + '_ {
-        self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
-    }
-
     /// Iterate over all legacy textual scoped macros visible at the end of the module
     pub fn legacy_macros(&self) -> impl Iterator<Item = (&Name, &[MacroId])> + '_ {
         self.legacy_macros.iter().map(|(name, def)| (name, &**def))
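These iterators hand back each scope entry together with its resolution, its visibility, and the import that introduced it; the symbol index further down uses exactly that to tell declarations apart from re-exports. A minimal sketch of consuming such an entry, with stand-in types for Item and ImportId (the real definitions live in hir-def):

// Stand-ins for hir_def's `per_ns::Item` and `ImportId`, only to show the shape.
#[derive(Clone, Copy)]
struct ImportId(u32);

#[allow(dead_code)]
struct Item<Def> {
    def: Def,
    import: Option<ImportId>,
}

// An entry with `import: Some(_)` reached the scope through a `use`/`extern crate`,
// i.e. it is a (re-)export rather than a local declaration.
fn describe<Def>(name: &str, item: &Item<Def>) -> String {
    match item.import {
        Some(ImportId(idx)) => format!("{name}: re-export (import #{idx})"),
        None => format!("{name}: declaration"),
    }
}

fn main() {
    let reexported = Item { def: (), import: Some(ImportId(0)) };
    let declared = Item { def: (), import: None };
    assert_eq!(describe("Foo", &reexported), "Foo: re-export (import #0)");
    assert_eq!(describe("Bar", &declared), "Bar: declaration");
}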

View file

@@ -107,7 +107,7 @@ impl LangItems {
         for (_, module_data) in crate_def_map.modules() {
             for impl_def in module_data.scope.impls() {
                 lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
-                for assoc in db.impl_data(impl_def).items.iter().copied() {
+                for &(_, assoc) in db.impl_data(impl_def).items.iter() {
                     match assoc {
                         AssocItemId::FunctionId(f) => {
                             lang_items.collect_lang_item(db, f, LangItemTarget::Function)

View file

@@ -502,7 +502,7 @@ impl ModuleId {
     }

     /// Whether this module represents the crate root module
-    fn is_crate_root(&self) -> bool {
+    pub fn is_crate_root(&self) -> bool {
         self.local_id == DefMap::ROOT && self.block.is_none()
     }
 }

View file

@@ -717,8 +717,8 @@ impl DefCollector<'_> {
                 }
             }
             None => {
-                for (name, def) in root_scope.macros() {
-                    self.def_map.macro_use_prelude.insert(name.clone(), (def, extern_crate));
+                for (name, it) in root_scope.macros() {
+                    self.def_map.macro_use_prelude.insert(name.clone(), (it.def, extern_crate));
                 }
             }
         }

View file

@@ -240,12 +240,12 @@ impl Visibility {
         if a_ancestors.any(|m| m == mod_b.local_id) {
             // B is above A
-            return Some(Visibility::Module(mod_a, expl_b));
+            return Some(Visibility::Module(mod_a, expl_a));
         }

         if b_ancestors.any(|m| m == mod_a.local_id) {
             // A is above B
-            return Some(Visibility::Module(mod_b, expl_a));
+            return Some(Visibility::Module(mod_b, expl_b));
         }

         None
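The fix keeps each visibility's explicitness flag attached to the module that is actually returned: when mod_b is an ancestor of mod_a, the narrower visibility is the one rooted at mod_a and must carry expl_a, and symmetrically in the other branch. A tiny illustration of that invariant with placeholder types:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Explicitness { Explicit, Implicit }

// A visibility is a module *plus* whether it was written out explicitly; the pair
// must travel together when the narrower of two visibilities is chosen.
fn narrower(
    (mod_a, expl_a): (&'static str, Explicitness),
    (mod_b, expl_b): (&'static str, Explicitness),
    b_is_ancestor_of_a: bool,
) -> (&'static str, Explicitness) {
    if b_is_ancestor_of_a { (mod_a, expl_a) } else { (mod_b, expl_b) }
}

fn main() {
    let a = ("crate::inner", Explicitness::Explicit);
    let b = ("crate", Explicitness::Implicit);
    // B is above A, so the result is A's module together with A's flag.
    assert_eq!(narrower(a, b, true), ("crate::inner", Explicitness::Explicit));
}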

View file

@@ -11,7 +11,7 @@ use syntax::utils::is_raw_identifier;
 /// and declarations. In theory, names should also carry hygiene info, but we are
 /// not there yet!
 ///
-/// Note that the rawness (`r#`) of names does not depend on whether they are written raw.
+/// Note that the rawness (`r#`) of names is not preserved. Names are always stored without a `r#` prefix.
 /// This is because we want to show (in completions etc.) names as raw depending on the needs
 /// of the current crate, for example if it is edition 2021 complete `gen` even if the defining
 /// crate is in edition 2024 and wrote `r#gen`, and the opposite holds as well.
@@ -77,6 +77,7 @@ impl Name {
     /// Hopefully, this should allow us to integrate hygiene cleaner in the
     /// future, and to switch to interned representation of names.
     fn new_text(text: &str) -> Name {
+        debug_assert!(!text.starts_with("r#"));
         Name { symbol: Symbol::intern(text), ctx: () }
     }

@@ -91,15 +92,34 @@ impl Name {
     pub fn new_root(text: &str) -> Name {
         // The edition doesn't matter for hygiene.
-        Self::new(text, SyntaxContextId::root(Edition::Edition2015))
+        Self::new(text.trim_start_matches("r#"), SyntaxContextId::root(Edition::Edition2015))
     }

     pub fn new_tuple_field(idx: usize) -> Name {
-        Name { symbol: Symbol::intern(&idx.to_string()), ctx: () }
+        let symbol = match idx {
+            0 => sym::INTEGER_0.clone(),
+            1 => sym::INTEGER_1.clone(),
+            2 => sym::INTEGER_2.clone(),
+            3 => sym::INTEGER_3.clone(),
+            4 => sym::INTEGER_4.clone(),
+            5 => sym::INTEGER_5.clone(),
+            6 => sym::INTEGER_6.clone(),
+            7 => sym::INTEGER_7.clone(),
+            8 => sym::INTEGER_8.clone(),
+            9 => sym::INTEGER_9.clone(),
+            10 => sym::INTEGER_10.clone(),
+            11 => sym::INTEGER_11.clone(),
+            12 => sym::INTEGER_12.clone(),
+            13 => sym::INTEGER_13.clone(),
+            14 => sym::INTEGER_14.clone(),
+            15 => sym::INTEGER_15.clone(),
+            _ => Symbol::intern(&idx.to_string()),
+        };
+        Name { symbol, ctx: () }
     }

     pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
-        Name { symbol: Symbol::intern(lt.text().as_str()), ctx: () }
+        Self::new_text(lt.text().as_str().trim_start_matches("r#"))
     }

     /// Resolve a name from the text of token.
@@ -142,15 +162,18 @@ impl Name {
     }

     /// Returns the text this name represents if it isn't a tuple field.
+    ///
+    /// Do not use this for user-facing text, use `display` instead to handle editions properly.
     pub fn as_str(&self) -> &str {
         self.symbol.as_str()
     }

+    // FIXME: Remove this
     pub fn unescaped(&self) -> UnescapedName<'_> {
         UnescapedName(self)
     }

-    pub fn is_escaped(&self, edition: Edition) -> bool {
+    pub fn needs_escape(&self, edition: Edition) -> bool {
         is_raw_identifier(self.symbol.as_str(), edition)
     }

@@ -173,16 +196,19 @@ impl Name {
         &self.symbol
     }

-    pub const fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
+    pub fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
+        debug_assert!(!symbol.as_str().starts_with("r#"));
         _ = ctx;
         Self { symbol, ctx: () }
     }

     // FIXME: This needs to go once we have hygiene
-    pub const fn new_symbol_root(sym: Symbol) -> Self {
+    pub fn new_symbol_root(sym: Symbol) -> Self {
+        debug_assert!(!sym.as_str().starts_with("r#"));
         Self { symbol: sym, ctx: () }
     }

+    // FIXME: Remove this
     #[inline]
     pub fn eq_ident(&self, ident: &str) -> bool {
         self.as_str() == ident.trim_start_matches("r#")
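Under the new invariant a Name never stores the r# prefix; raw-ness is re-derived per edition when the name is displayed. A small standalone sketch of that normalize-on-construction pattern (not the actual Name type):

// Hypothetical miniature of the invariant: store names unprefixed, re-check on output.
fn normalize(text: &str) -> String {
    let stripped = text.trim_start_matches("r#");
    debug_assert!(!stripped.starts_with("r#"));
    stripped.to_owned()
}

// A name needs the `r#` escape only if it collides with a keyword of the target edition.
fn display(name: &str, keywords_of_edition: &[&str]) -> String {
    if keywords_of_edition.contains(&name) { format!("r#{name}") } else { name.to_owned() }
}

fn main() {
    assert_eq!(normalize("r#gen"), "gen");
    assert_eq!(display("gen", &["gen"]), "r#gen"); // e.g. edition 2024, where `gen` is reserved
    assert_eq!(display("gen", &[]), "gen"); // e.g. edition 2021
}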

View file

@@ -856,7 +856,7 @@ fn impl_def_datum(
     let associated_ty_value_ids = impl_data
         .items
         .iter()
-        .filter_map(|item| match item {
+        .filter_map(|(_, item)| match item {
             AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
             _ => None,
         })

View file

@@ -746,16 +746,9 @@ fn lookup_impl_assoc_item_for_trait_ref(
     let table = InferenceTable::new(db, env);
     let (impl_data, impl_subst) = find_matching_impl(impls, table, trait_ref)?;
-    let item = impl_data.items.iter().find_map(|&it| match it {
-        AssocItemId::FunctionId(f) => {
-            (db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f))
-        }
-        AssocItemId::ConstId(c) => db
-            .const_data(c)
-            .name
-            .as_ref()
-            .map(|n| n == name)
-            .and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }),
+    let item = impl_data.items.iter().find_map(|(n, it)| match *it {
+        AssocItemId::FunctionId(f) => (n == name).then_some(AssocItemId::FunctionId(f)),
+        AssocItemId::ConstId(c) => (n == name).then_some(AssocItemId::ConstId(c)),
         AssocItemId::TypeAliasId(_) => None,
     })?;
     Some((item, impl_subst))
@@ -850,7 +843,7 @@ fn is_inherent_impl_coherent(
     };
     rustc_has_incoherent_inherent_impls
         && !impl_data.items.is_empty()
-        && impl_data.items.iter().copied().all(|assoc| match assoc {
+        && impl_data.items.iter().all(|&(_, assoc)| match assoc {
             AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
             AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
             AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
@@ -1399,7 +1392,7 @@ fn iterate_inherent_methods(
     callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
     for &impl_id in impls.for_self_ty(self_ty) {
-        for &item in table.db.impl_data(impl_id).items.iter() {
+        for &(ref item_name, item) in table.db.impl_data(impl_id).items.iter() {
             let visible = match is_valid_impl_method_candidate(
                 table,
                 self_ty,
@@ -1408,6 +1401,7 @@ fn iterate_inherent_methods(
                 name,
                 impl_id,
                 item,
+                item_name,
             ) {
                 IsValidCandidate::Yes => true,
                 IsValidCandidate::NotVisible => false,
@@ -1467,6 +1461,7 @@ fn is_valid_impl_method_candidate(
     name: Option<&Name>,
     impl_id: ImplId,
     item: AssocItemId,
+    item_name: &Name,
 ) -> IsValidCandidate {
     match item {
         AssocItemId::FunctionId(f) => is_valid_impl_fn_candidate(
@@ -1477,11 +1472,12 @@ fn is_valid_impl_method_candidate(
             receiver_ty,
             self_ty,
             visible_from_module,
+            item_name,
         ),
         AssocItemId::ConstId(c) => {
             let db = table.db;
             check_that!(receiver_ty.is_none());
-            check_that!(name.is_none_or(|n| db.const_data(c).name.as_ref() == Some(n)));
+            check_that!(name.is_none_or(|n| n == item_name));

             if let Some(from_module) = visible_from_module {
                 if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) {
@@ -1565,11 +1561,13 @@ fn is_valid_impl_fn_candidate(
     receiver_ty: Option<&Ty>,
     self_ty: &Ty,
     visible_from_module: Option<ModuleId>,
+    item_name: &Name,
 ) -> IsValidCandidate {
+    check_that!(name.is_none_or(|n| n == item_name));
     let db = table.db;
     let data = db.function_data(fn_id);
-    check_that!(name.is_none_or(|n| n == &data.name));

     if let Some(from_module) = visible_from_module {
         if !db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) {
             cov_mark::hit!(autoderef_candidate_not_visible);

View file

@@ -435,7 +435,7 @@ pub(crate) fn visit_module(
     visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
     for impl_id in crate_def_map[module_id].scope.impls() {
         let impl_data = db.impl_data(impl_id);
-        for &item in impl_data.items.iter() {
+        for &(_, item) in impl_data.items.iter() {
             match item {
                 AssocItemId::FunctionId(it) => {
                     let body = db.body(it.into());

View file

@@ -20,6 +20,7 @@ itertools.workspace = true
 smallvec.workspace = true
 tracing.workspace = true
 triomphe.workspace = true
+indexmap.workspace = true

 # local deps
 base-db.workspace = true

View file

@@ -775,29 +775,16 @@ impl Module {
                     AssocItemId::ConstId(id) => !db.const_data(id).has_body,
                     AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
                 });
-                impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
-                    |&item| {
-                        Some((
-                            item,
-                            match item {
-                                AssocItemId::FunctionId(it) => db.function_data(it).name.clone(),
-                                AssocItemId::ConstId(it) => {
-                                    db.const_data(it).name.as_ref()?.clone()
-                                }
-                                AssocItemId::TypeAliasId(it) => db.type_alias_data(it).name.clone(),
-                            },
-                        ))
-                    },
-                ));
+                impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().cloned());

                 let redundant = impl_assoc_items_scratch
                     .iter()
-                    .filter(|(id, name)| {
+                    .filter(|(name, id)| {
                         !items.iter().any(|(impl_name, impl_item)| {
                             discriminant(impl_item) == discriminant(id) && impl_name == name
                         })
                     })
-                    .map(|(item, name)| (name.clone(), AssocItem::from(*item)));
+                    .map(|(name, item)| (name.clone(), AssocItem::from(*item)));
                 for (name, assoc_item) in redundant {
                     acc.push(
                         TraitImplRedundantAssocItems {
@@ -812,7 +799,7 @@ impl Module {
                 let missing: Vec<_> = required_items
                     .filter(|(name, id)| {
-                        !impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
+                        !impl_assoc_items_scratch.iter().any(|(impl_name, impl_item)| {
                             discriminant(impl_item) == discriminant(id) && impl_name == name
                         })
                     })
@@ -844,7 +831,7 @@ impl Module {
                     source_map,
                 );

-                for &item in db.impl_data(impl_def.id).items.iter() {
+                for &(_, item) in db.impl_data(impl_def.id).items.iter() {
                     AssocItem::from(item).diagnostics(db, acc, style_lints);
                 }
             }
@@ -4307,7 +4294,7 @@ impl Impl {
     }

     pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
-        db.impl_data(self.id).items.iter().map(|&it| it.into()).collect()
+        db.impl_data(self.id).items.iter().map(|&(_, it)| it.into()).collect()
     }

     pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
@@ -5165,7 +5152,7 @@ impl Type {
         let impls = db.inherent_impls_in_crate(krate);

         for impl_def in impls.for_self_ty(&self.ty) {
-            for &item in db.impl_data(*impl_def).items.iter() {
+            for &(_, item) in db.impl_data(*impl_def).items.iter() {
                 if callback(item) {
                     return;
                 }

View file

@@ -39,8 +39,8 @@ use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
     ast::{self, HasAttrs as _, HasGenericParams},
-    AstNode, AstToken, Direction, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
-    TextRange, TextSize,
+    AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
+    TextSize,
 };
 use triomphe::Arc;

@@ -1591,14 +1591,11 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_mod_path_relative(
         &self,
         to: Module,
-        segments: impl IntoIterator<Item = SmolStr>,
+        segments: impl IntoIterator<Item = Name>,
     ) -> Option<impl Iterator<Item = ItemInNs>> {
         let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items(
             self.db.upcast(),
-            &ModPath::from_segments(
-                hir_def::path::PathKind::Plain,
-                segments.into_iter().map(|it| Name::new_root(&it)),
-            ),
+            &ModPath::from_segments(hir_def::path::PathKind::Plain, segments),
         );
         Some(items.iter_items().map(|(item, _)| item.into()))
     }

View file

@@ -56,7 +56,7 @@ impl ChildBySource for ImplId {
                 res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
             },
         );
-        data.items.iter().for_each(|&item| {
+        data.items.iter().for_each(|&(_, item)| {
             add_assoc_item(db, res, file_id, item);
         });
     }

View file

@@ -1,27 +1,34 @@
 //! File symbol extraction.

+use either::Either;
 use hir_def::{
     db::DefDatabase,
-    item_scope::ItemInNs,
+    item_scope::{ImportId, ImportOrExternCrate},
+    per_ns::Item,
     src::{HasChildSource, HasSource},
-    AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
-    TraitId,
+    visibility::{Visibility, VisibilityExplicitness},
+    AdtId, AssocItemId, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
+    ModuleDefId, ModuleId, TraitId,
 };
-use hir_expand::HirFileId;
+use hir_expand::{name::Name, HirFileId};
 use hir_ty::{
     db::HirDatabase,
     display::{hir_display_with_types_map, HirDisplay},
 };
+use intern::Symbol;
+use rustc_hash::FxHashMap;
 use span::Edition;
 use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};

 use crate::{Module, ModuleDef, Semantics};

+pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
+
 /// The actual data that is stored in the index. It should be as compact as
 /// possible.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct FileSymbol {
-    pub name: SmolStr,
+    pub name: Symbol,
     pub def: ModuleDef,
     pub loc: DeclarationLocation,
     pub container_name: Option<SmolStr>,
@@ -37,7 +44,7 @@ pub struct DeclarationLocation {
     /// This points to the whole syntax node of the declaration.
     pub ptr: SyntaxNodePtr,
     /// This points to the [`syntax::ast::Name`] identifier of the declaration.
-    pub name_ptr: AstPtr<syntax::ast::Name>,
+    pub name_ptr: AstPtr<Either<syntax::ast::Name, syntax::ast::NameRef>>,
 }

 impl DeclarationLocation {
@@ -55,7 +62,7 @@ struct SymbolCollectorWork {

 pub struct SymbolCollector<'a> {
     db: &'a dyn HirDatabase,
-    symbols: Vec<FileSymbol>,
+    symbols: FxIndexSet<FileSymbol>,
     work: Vec<SymbolCollectorWork>,
     current_container_name: Option<SmolStr>,
     edition: Edition,
@@ -86,11 +93,11 @@ impl<'a> SymbolCollector<'a> {
         }
     }

-    pub fn finish(self) -> Vec<FileSymbol> {
-        self.symbols
+    pub fn finish(self) -> Box<[FileSymbol]> {
+        self.symbols.into_iter().collect()
     }

-    pub fn collect_module(db: &dyn HirDatabase, module: Module) -> Vec<FileSymbol> {
+    pub fn collect_module(db: &dyn HirDatabase, module: Module) -> Box<[FileSymbol]> {
         let mut symbol_collector = SymbolCollector::new(db);
         symbol_collector.collect(module);
         symbol_collector.finish()
@@ -104,96 +111,174 @@ impl<'a> SymbolCollector<'a> {
     }

     fn collect_from_module(&mut self, module_id: ModuleId) {
-        let def_map = module_id.def_map(self.db.upcast());
-        let scope = &def_map[module_id.local_id].scope;
-
-        for module_def_id in scope.declarations() {
-            match module_def_id {
-                ModuleDefId::ModuleId(id) => self.push_module(id),
+        let push_decl = |this: &mut Self, def, name| {
+            match def {
+                ModuleDefId::ModuleId(id) => this.push_module(id, name),
                 ModuleDefId::FunctionId(id) => {
-                    self.push_decl(id, false);
-                    self.collect_from_body(id);
+                    this.push_decl(id, name, false);
+                    this.collect_from_body(id);
                 }
-                ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
-                ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
-                ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
+                ModuleDefId::AdtId(AdtId::StructId(id)) => this.push_decl(id, name, false),
+                ModuleDefId::AdtId(AdtId::EnumId(id)) => this.push_decl(id, name, false),
+                ModuleDefId::AdtId(AdtId::UnionId(id)) => this.push_decl(id, name, false),
                 ModuleDefId::ConstId(id) => {
-                    self.push_decl(id, false);
-                    self.collect_from_body(id);
+                    this.push_decl(id, name, false);
+                    this.collect_from_body(id);
                 }
                 ModuleDefId::StaticId(id) => {
-                    self.push_decl(id, false);
-                    self.collect_from_body(id);
+                    this.push_decl(id, name, false);
+                    this.collect_from_body(id);
                 }
                 ModuleDefId::TraitId(id) => {
-                    self.push_decl(id, false);
-                    self.collect_from_trait(id);
+                    this.push_decl(id, name, false);
+                    this.collect_from_trait(id);
                 }
                 ModuleDefId::TraitAliasId(id) => {
-                    self.push_decl(id, false);
+                    this.push_decl(id, name, false);
                 }
                 ModuleDefId::TypeAliasId(id) => {
-                    self.push_decl(id, false);
+                    this.push_decl(id, name, false);
                 }
                 ModuleDefId::MacroId(id) => match id {
-                    MacroId::Macro2Id(id) => self.push_decl(id, false),
-                    MacroId::MacroRulesId(id) => self.push_decl(id, false),
-                    MacroId::ProcMacroId(id) => self.push_decl(id, false),
+                    MacroId::Macro2Id(id) => this.push_decl(id, name, false),
+                    MacroId::MacroRulesId(id) => this.push_decl(id, name, false),
+                    MacroId::ProcMacroId(id) => this.push_decl(id, name, false),
                 },
                 // Don't index these.
                 ModuleDefId::BuiltinType(_) => {}
                 ModuleDefId::EnumVariantId(_) => {}
             }
-        }
+        };
+
+        // Nested trees are very common, so a cache here will hit a lot.
+        let import_child_source_cache = &mut FxHashMap::default();
+
+        let mut push_import = |this: &mut Self, i: ImportId, name: &Name, def: ModuleDefId| {
+            let source = import_child_source_cache
+                .entry(i.import)
+                .or_insert_with(|| i.import.child_source(this.db.upcast()));
+            let Some(use_tree_src) = source.value.get(i.idx) else { return };
+            let Some(name_ptr) = use_tree_src
+                .rename()
+                .and_then(|rename| rename.name())
+                .map(Either::Left)
+                .or_else(|| use_tree_src.path()?.segment()?.name_ref().map(Either::Right))
+                .map(|it| AstPtr::new(&it))
+            else {
+                return;
+            };
+            let dec_loc = DeclarationLocation {
+                hir_file_id: source.file_id,
+                ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
+                name_ptr,
+            };
+            this.symbols.insert(FileSymbol {
+                name: name.symbol().clone(),
+                def: def.into(),
+                container_name: this.current_container_name.clone(),
+                loc: dec_loc,
+                is_alias: false,
+                is_assoc: false,
+            });
+        };
+
+        let push_extern_crate =
+            |this: &mut Self, i: ExternCrateId, name: &Name, def: ModuleDefId| {
+                let loc = i.lookup(this.db.upcast());
+                let source = loc.source(this.db.upcast());
+                let Some(name_ptr) = source
+                    .value
+                    .rename()
+                    .and_then(|rename| rename.name())
+                    .map(Either::Left)
+                    .or_else(|| source.value.name_ref().map(Either::Right))
+                    .map(|it| AstPtr::new(&it))
+                else {
+                    return;
+                };
+                let dec_loc = DeclarationLocation {
+                    hir_file_id: source.file_id,
+                    ptr: SyntaxNodePtr::new(source.value.syntax()),
+                    name_ptr,
+                };
+                this.symbols.insert(FileSymbol {
+                    name: name.symbol().clone(),
+                    def: def.into(),
+                    container_name: this.current_container_name.clone(),
+                    loc: dec_loc,
+                    is_alias: false,
+                    is_assoc: false,
+                });
+            };
+
+        let is_explicit_import = |vis| {
+            match vis {
+                Visibility::Module(_, VisibilityExplicitness::Explicit) => true,
+                Visibility::Module(_, VisibilityExplicitness::Implicit) => {
+                    // consider imports in the crate root explicit, as these are visibly
+                    // crate-wide anyways
+                    module_id.is_crate_root()
+                }
+                Visibility::Public => true,
+            }
+        };
+
+        let def_map = module_id.def_map(self.db.upcast());
+        let scope = &def_map[module_id.local_id].scope;

         for impl_id in scope.impls() {
             self.collect_from_impl(impl_id);
         }

-        // Record renamed imports.
-        // FIXME: In case it imports multiple items under different namespaces we just pick one arbitrarily
-        // for now.
-        for id in scope.imports() {
-            let source = id.import.child_source(self.db.upcast());
-            let Some(use_tree_src) = source.value.get(id.idx) else { continue };
-            let Some(rename) = use_tree_src.rename() else { continue };
-            let Some(name) = rename.name() else { continue };
-
-            let res = scope.fully_resolve_import(self.db.upcast(), id);
-            res.iter_items().for_each(|(item, _)| {
-                let def = match item {
-                    ItemInNs::Types(def) | ItemInNs::Values(def) => def,
-                    ItemInNs::Macros(def) => ModuleDefId::from(def),
-                }
-                .into();
-                let dec_loc = DeclarationLocation {
-                    hir_file_id: source.file_id,
-                    ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
-                    name_ptr: AstPtr::new(&name),
-                };
-
-                self.symbols.push(FileSymbol {
-                    name: name.text().into(),
-                    def,
-                    container_name: self.current_container_name.clone(),
-                    loc: dec_loc,
-                    is_alias: false,
-                    is_assoc: false,
-                });
-            });
-        }
+        for (name, Item { def, vis, import }) in scope.types() {
+            if let Some(i) = import {
+                if is_explicit_import(vis) {
+                    match i {
+                        ImportOrExternCrate::Import(i) => push_import(self, i, name, def),
+                        ImportOrExternCrate::ExternCrate(i) => {
+                            push_extern_crate(self, i, name, def)
+                        }
+                    }
+                }
+                continue;
+            }
+            // self is a declaration
+            push_decl(self, def, name)
+        }
+
+        for (name, Item { def, vis, import }) in scope.macros() {
+            if let Some(i) = import {
+                if is_explicit_import(vis) {
+                    push_import(self, i, name, def.into());
+                }
+                continue;
+            }
+            // self is a declaration
+            push_decl(self, def.into(), name)
+        }
+
+        for (name, Item { def, vis, import }) in scope.values() {
+            if let Some(i) = import {
+                if is_explicit_import(vis) {
+                    push_import(self, i, name, def);
+                }
+                continue;
+            }
+            // self is a declaration
+            push_decl(self, def, name)
        }

         for const_id in scope.unnamed_consts() {
             self.collect_from_body(const_id);
         }

-        for (_, id) in scope.legacy_macros() {
+        for (name, id) in scope.legacy_macros() {
             for &id in id {
                 if id.module(self.db.upcast()) == module_id {
                     match id {
-                        MacroId::Macro2Id(id) => self.push_decl(id, false),
-                        MacroId::MacroRulesId(id) => self.push_decl(id, false),
-                        MacroId::ProcMacroId(id) => self.push_decl(id, false),
+                        MacroId::Macro2Id(id) => self.push_decl(id, name, false),
+                        MacroId::MacroRulesId(id) => self.push_decl(id, name, false),
+                        MacroId::ProcMacroId(id) => self.push_decl(id, name, false),
                     }
                 }
             }
@@ -223,8 +308,8 @@ impl<'a> SymbolCollector<'a> {
                 .to_smolstr(),
         );
         self.with_container_name(impl_name, |s| {
-            for &assoc_item_id in impl_data.items.iter() {
-                s.push_assoc_item(assoc_item_id)
+            for &(ref name, assoc_item_id) in &impl_data.items {
+                s.push_assoc_item(assoc_item_id, name)
             }
         })
     }
@@ -232,8 +317,8 @@ impl<'a> SymbolCollector<'a> {
     fn collect_from_trait(&mut self, trait_id: TraitId) {
         let trait_data = self.db.trait_data(trait_id);
         self.with_container_name(Some(trait_data.name.as_str().into()), |s| {
-            for &(_, assoc_item_id) in &trait_data.items {
-                s.push_assoc_item(assoc_item_id);
+            for &(ref name, assoc_item_id) in &trait_data.items {
+                s.push_assoc_item(assoc_item_id, name);
             }
         });
     }
@@ -266,15 +351,15 @@ impl<'a> SymbolCollector<'a> {
         }
     }

-    fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
+    fn push_assoc_item(&mut self, assoc_item_id: AssocItemId, name: &Name) {
         match assoc_item_id {
-            AssocItemId::FunctionId(id) => self.push_decl(id, true),
-            AssocItemId::ConstId(id) => self.push_decl(id, true),
-            AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
+            AssocItemId::FunctionId(id) => self.push_decl(id, name, true),
+            AssocItemId::ConstId(id) => self.push_decl(id, name, true),
+            AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true),
         }
     }

-    fn push_decl<'db, L>(&mut self, id: L, is_assoc: bool)
+    fn push_decl<'db, L>(&mut self, id: L, name: &Name, is_assoc: bool)
     where
         L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>,
         <L as Lookup>::Data: HasSource,
@@ -287,13 +372,13 @@ impl<'a> SymbolCollector<'a> {
         let dec_loc = DeclarationLocation {
             hir_file_id: source.file_id,
             ptr: SyntaxNodePtr::new(source.value.syntax()),
-            name_ptr: AstPtr::new(&name_node),
+            name_ptr: AstPtr::new(&name_node).wrap_left(),
         };

         if let Some(attrs) = def.attrs(self.db) {
             for alias in attrs.doc_aliases() {
-                self.symbols.push(FileSymbol {
-                    name: alias.as_str().into(),
+                self.symbols.insert(FileSymbol {
+                    name: alias.clone(),
                     def,
                     loc: dec_loc.clone(),
                     container_name: self.current_container_name.clone(),
@@ -303,8 +388,8 @@ impl<'a> SymbolCollector<'a> {
             }
         }

-        self.symbols.push(FileSymbol {
-            name: name_node.text().into(),
+        self.symbols.insert(FileSymbol {
+            name: name.symbol().clone(),
             def,
             container_name: self.current_container_name.clone(),
             loc: dec_loc,
@@ -313,7 +398,7 @@ impl<'a> SymbolCollector<'a> {
         });
     }

-    fn push_module(&mut self, module_id: ModuleId) {
+    fn push_module(&mut self, module_id: ModuleId, name: &Name) {
         let def_map = module_id.def_map(self.db.upcast());
         let module_data = &def_map[module_id.local_id];
         let Some(declaration) = module_data.origin.declaration() else { return };
@@ -322,15 +407,15 @@ impl<'a> SymbolCollector<'a> {
         let dec_loc = DeclarationLocation {
             hir_file_id: declaration.file_id,
             ptr: SyntaxNodePtr::new(module.syntax()),
-            name_ptr: AstPtr::new(&name_node),
+            name_ptr: AstPtr::new(&name_node).wrap_left(),
         };

         let def = ModuleDef::Module(module_id.into());

         if let Some(attrs) = def.attrs(self.db) {
             for alias in attrs.doc_aliases() {
-                self.symbols.push(FileSymbol {
-                    name: alias.as_str().into(),
+                self.symbols.insert(FileSymbol {
+                    name: alias.clone(),
                     def,
                     loc: dec_loc.clone(),
                     container_name: self.current_container_name.clone(),
@@ -340,8 +425,8 @@ impl<'a> SymbolCollector<'a> {
             }
         }

-        self.symbols.push(FileSymbol {
-            name: name_node.text().into(),
+        self.symbols.insert(FileSymbol {
+            name: name.symbol().clone(),
             def: ModuleDef::Module(module_id.into()),
             container_name: self.current_container_name.clone(),
             loc: dec_loc,
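The collector now records a scope entry as a re-export only when the import is "meaningful": its visibility was spelled out explicitly, or it sits in the crate root, where even implicit visibility is effectively crate-wide. A condensed sketch of that predicate, mirroring the is_explicit_import closure above with simplified stand-ins for the hir-def visibility types:

// Simplified stand-ins for hir_def::visibility::{Visibility, VisibilityExplicitness}.
enum Explicitness { Explicit, Implicit }
enum Visibility { Public, Module(Explicitness) }

fn is_meaningful_reexport(vis: Visibility, in_crate_root: bool) -> bool {
    match vis {
        Visibility::Public => true,
        Visibility::Module(Explicitness::Explicit) => true,
        // Imports in the crate root count even without an explicit `pub(...)`,
        // since the crate root is visible crate-wide anyway.
        Visibility::Module(Explicitness::Implicit) => in_crate_root,
    }
}

fn main() {
    assert!(is_meaningful_reexport(Visibility::Public, false));
    assert!(is_meaningful_reexport(Visibility::Module(Explicitness::Implicit), true));
    assert!(!is_meaningful_reexport(Visibility::Module(Explicitness::Implicit), false));
}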

View file

@@ -5,7 +5,7 @@ use ide_db::imports::{
     insert_use::ImportScope,
 };
 use itertools::Itertools;
-use syntax::{ast, AstNode, SyntaxNode, ToSmolStr, T};
+use syntax::{ast, AstNode, SyntaxNode, ToSmolStr};

 use crate::{
     config::AutoImportExclusionType,
@@ -403,10 +403,11 @@ fn import_on_the_fly_method(
 fn import_name(ctx: &CompletionContext<'_>) -> String {
     let token_kind = ctx.token.kind();
-    if matches!(token_kind, T![.] | T![::]) {
-        String::new()
-    } else {
-        ctx.token.to_string()
-    }
+
+    if token_kind.is_any_identifier() {
+        ctx.token.to_string()
+    } else {
+        String::new()
+    }
 }

View file

@@ -423,7 +423,7 @@ fn render_resolution_path(
     let name = local_name.display_no_db(ctx.completion.edition).to_smolstr();
     let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
-    if local_name.is_escaped(completion.edition) {
+    if local_name.needs_escape(completion.edition) {
         item.insert_text(local_name.display_no_db(completion.edition).to_smolstr());
     }
     // Add `<>` for generic types

View file

@@ -1746,7 +1746,7 @@ fn intrinsics() {
 fn function() {
     transmute$0
 }
 "#,
         expect![[r#"
             fn transmute() (use core::mem::transmute) unsafe fn(Src) -> Dst
         "#]],
@@ -1767,7 +1767,9 @@ fn function() {
     mem::transmute$0
 }
 "#,
-        expect![""],
+        expect![[r#"
+            fn transmute() (use core::mem) unsafe fn(Src) -> Dst
+        "#]],
     );
 }

View file

@@ -1,15 +1,17 @@
 //! Look up accessible paths for items.

+use std::ops::ControlFlow;
+
 use hir::{
     db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig,
-    ItemInNs, ModPath, Module, ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics,
+    ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
     SemanticsScope, Trait, TyFingerprint, Type,
 };
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
     ast::{self, make, HasName},
-    AstNode, SmolStr, SyntaxNode,
+    AstNode, SyntaxNode,
 };

 use crate::{
@@ -51,7 +53,7 @@ pub struct TraitImportCandidate {
 #[derive(Debug)]
 pub struct PathImportCandidate {
     /// Optional qualifier before name.
-    pub qualifier: Vec<SmolStr>,
+    pub qualifier: Vec<Name>,
     /// The name the item (struct, trait, enum, etc.) should have.
     pub name: NameToImport,
 }
@@ -70,10 +72,18 @@ pub enum NameToImport {
 impl NameToImport {
     pub fn exact_case_sensitive(s: String) -> NameToImport {
+        let s = match s.strip_prefix("r#") {
+            Some(s) => s.to_owned(),
+            None => s,
+        };
         NameToImport::Exact(s, true)
     }

     pub fn fuzzy(s: String) -> NameToImport {
+        let s = match s.strip_prefix("r#") {
+            Some(s) => s.to_owned(),
+            None => s,
+        };
         // unless all chars are lowercase, we do a case sensitive search
         let case_sensitive = s.chars().any(|c| c.is_uppercase());
         NameToImport::Fuzzy(s, case_sensitive)
@@ -350,21 +360,27 @@ fn path_applicable_imports(
             .take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
             .collect()
         }
+        // we have some unresolved qualifier that we search an import for
+        // The key here is that whatever we import must form a resolved path for the remainder of
+        // what follows
+        // FIXME: This doesn't handle visibility
         [first_qsegment, qualifier_rest @ ..] => items_locator::items_with_name(
             sema,
             current_crate,
-            NameToImport::Exact(first_qsegment.to_string(), true),
+            NameToImport::Exact(first_qsegment.as_str().to_owned(), true),
             AssocSearchMode::Exclude,
         )
         .filter_map(|item| {
-            import_for_item(
+            // we found imports for `first_qsegment`, now we need to filter these imports by whether
+            // they result in resolving the rest of the path successfully
+            validate_resolvable(
                 sema,
                 scope,
                 mod_path,
+                scope_filter,
                 &path_candidate.name,
                 item,
                 qualifier_rest,
-                scope_filter,
             )
         })
         .take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
@@ -372,14 +388,16 @@ fn path_applicable_imports(
     }
 }

-fn import_for_item(
+/// Validates and builds an import for `resolved_qualifier` if the `unresolved_qualifier` appended
+/// to it resolves and there is a validate `candidate` after that.
+fn validate_resolvable(
     sema: &Semantics<'_, RootDatabase>,
     scope: &SemanticsScope<'_>,
     mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
+    scope_filter: impl Fn(ItemInNs) -> bool,
     candidate: &NameToImport,
     resolved_qualifier: ItemInNs,
-    unresolved_qualifier: &[SmolStr],
-    scope_filter: impl Fn(ItemInNs) -> bool,
+    unresolved_qualifier: &[Name],
 ) -> Option<LocatedImport> {
     let _p = tracing::info_span!("ImportAssets::import_for_item").entered();
@@ -410,8 +428,11 @@ fn import_for_item(
         module,
         candidate.clone(),
         AssocSearchMode::Exclude,
+        |it| match scope_filter(it) {
+            true => ControlFlow::Break(it),
+            false => ControlFlow::Continue(()),
+        },
     )
-    .find(|&it| scope_filter(it))
     .map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item))
 }

 // FIXME
@@ -709,7 +730,7 @@ fn path_import_candidate(
     if qualifier.first_qualifier().is_none_or(|it| sema.resolve_path(&it).is_none()) {
         let qualifier = qualifier
             .segments()
-            .map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
+            .map(|seg| seg.name_ref().map(|name| Name::new_root(&name.text())))
             .collect::<Option<Vec<_>>>()?;
         ImportCandidate::Path(PathImportCandidate { qualifier, name })
     } else {

View file

@@ -2,6 +2,8 @@
 //! by its name and a few criteria.
 //! The main reason for this module to exist is the fact that project's items and dependencies' items
 //! are located in different caches, with different APIs.
+use std::ops::ControlFlow;
+
 use either::Either;
 use hir::{import_map, Crate, ItemInNs, Module, Semantics};
 use limit::Limit;
@@ -17,6 +19,7 @@ pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
 pub use import_map::AssocSearchMode;

+// FIXME: Do callbacks instead to avoid allocations.
 /// Searches for importable items with the given name in the crate and its dependencies.
 pub fn items_with_name<'a>(
     sema: &'a Semantics<'_, RootDatabase>,
@@ -70,12 +73,13 @@ pub fn items_with_name<'a>(
 }

 /// Searches for importable items with the given name in the crate and its dependencies.
-pub fn items_with_name_in_module<'a>(
-    sema: &'a Semantics<'_, RootDatabase>,
+pub fn items_with_name_in_module<T>(
+    sema: &Semantics<'_, RootDatabase>,
     module: Module,
     name: NameToImport,
     assoc_item_search: AssocSearchMode,
-) -> impl Iterator<Item = ItemInNs> + 'a {
+    mut cb: impl FnMut(ItemInNs) -> ControlFlow<T>,
+) -> Option<T> {
     let _p = tracing::info_span!("items_with_name_in", name = name.text(), assoc_item_search = ?assoc_item_search, ?module)
         .entered();
@@ -107,14 +111,12 @@ pub fn items_with_name_in_module<'a>(
             local_query
         }
     };
-    let mut local_results = Vec::new();
     local_query.search(&[sema.db.module_symbols(module)], |local_candidate| {
-        local_results.push(match local_candidate.def {
+        cb(match local_candidate.def {
             hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
             def => ItemInNs::from(def),
         })
-    });
-    local_results.into_iter()
+    })
 }

 fn find_items<'a>(
@@ -142,7 +144,8 @@ fn find_items<'a>(
         local_results.push(match local_candidate.def {
             hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
             def => ItemInNs::from(def),
-        })
+        });
+        ControlFlow::<()>::Continue(())
     });
     local_results.into_iter().chain(external_importables)
 }
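The callback-based signature lets callers stop at the first hit instead of collecting every candidate into a Vec. A generic, self-contained sketch of the same driver shape (the names here are illustrative, not the crate's real API):

use std::ops::ControlFlow;

// A search driver in the shape of the new API: the callback decides whether to keep
// going (Continue) or to stop and surface a value (Break).
fn search_with<T>(haystack: &[i32], mut cb: impl FnMut(i32) -> ControlFlow<T>) -> Option<T> {
    for &item in haystack {
        if let ControlFlow::Break(found) = cb(item) {
            return Some(found);
        }
    }
    None
}

fn main() {
    // Early exit on the first match, without allocating intermediate results.
    let first_even = search_with(&[1, 3, 4, 5], |n| {
        if n % 2 == 0 { ControlFlow::Break(n) } else { ControlFlow::Continue(()) }
    });
    assert_eq!(first_even, Some(4));
}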

View file

@@ -25,6 +25,7 @@ use std::{
     fmt,
     hash::{Hash, Hasher},
     mem,
+    ops::ControlFlow,
 };

 use base_db::{
@@ -136,16 +137,13 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar
         // the module or crate indices for those in salsa unless we need to.
         .for_each(|module| symbol_collector.collect(module));

-    let mut symbols = symbol_collector.finish();
-    symbols.shrink_to_fit();
-    Arc::new(SymbolIndex::new(symbols))
+    Arc::new(SymbolIndex::new(symbol_collector.finish()))
 }

 fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
     let _p = tracing::info_span!("module_symbols").entered();

-    let symbols = SymbolCollector::collect_module(db.upcast(), module);
-    Arc::new(SymbolIndex::new(symbols))
+    Arc::new(SymbolIndex::new(SymbolCollector::collect_module(db.upcast(), module)))
 }

 pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> {
@@ -222,13 +220,16 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
     };

     let mut res = vec![];
-    query.search(&indices, |f| res.push(f.clone()));
+    query.search::<()>(&indices, |f| {
+        res.push(f.clone());
+        ControlFlow::Continue(())
+    });
     res
 }

 #[derive(Default)]
 pub struct SymbolIndex {
-    symbols: Vec<FileSymbol>,
+    symbols: Box<[FileSymbol]>,
     map: fst::Map<Vec<u8>>,
 }

@@ -253,10 +254,10 @@ impl Hash for SymbolIndex {
 }

 impl SymbolIndex {
-    fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
+    fn new(mut symbols: Box<[FileSymbol]>) -> SymbolIndex {
         fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering {
-            let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase());
-            let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase());
+            let lhs_chars = lhs.name.as_str().chars().map(|c| c.to_ascii_lowercase());
+            let rhs_chars = rhs.name.as_str().chars().map(|c| c.to_ascii_lowercase());
             lhs_chars.cmp(rhs_chars)
         }

@@ -316,11 +317,11 @@ impl SymbolIndex {
 }

 impl Query {
-    pub(crate) fn search<'sym>(
+    pub(crate) fn search<'sym, T>(
         self,
         indices: &'sym [Arc<SymbolIndex>],
-        cb: impl FnMut(&'sym FileSymbol),
-    ) {
+        cb: impl FnMut(&'sym FileSymbol) -> ControlFlow<T>,
+    ) -> Option<T> {
         let _p = tracing::info_span!("symbol_index::Query::search").entered();
         let mut op = fst::map::OpBuilder::new();
         match self.mode {
@@ -351,12 +352,12 @@ impl Query {
         }
     }

-    fn search_maps<'sym>(
+    fn search_maps<'sym, T>(
         &self,
         indices: &'sym [Arc<SymbolIndex>],
         mut stream: fst::map::Union<'_>,
-        mut cb: impl FnMut(&'sym FileSymbol),
-    ) {
+        mut cb: impl FnMut(&'sym FileSymbol) -> ControlFlow<T>,
+    ) -> Option<T> {
         let ignore_underscore_prefixed = !self.query.starts_with("__");
         while let Some((_, indexed_values)) = stream.next() {
             for &IndexedValue { index, value } in indexed_values {
@@ -377,15 +378,19 @@ impl Query {
                         continue;
                     }
                     // Hide symbols that start with `__` unless the query starts with `__`
-                    if ignore_underscore_prefixed && symbol.name.starts_with("__") {
+                    let symbol_name = symbol.name.as_str();
+                    if ignore_underscore_prefixed && symbol_name.starts_with("__") {
                         continue;
                     }
-                    if self.mode.check(&self.query, self.case_sensitive, &symbol.name) {
-                        cb(symbol);
+                    if self.mode.check(&self.query, self.case_sensitive, symbol_name) {
+                        if let Some(b) = cb(symbol).break_value() {
+                            return Some(b);
+                        }
                     }
                 }
             }
         }
+        None
     }

     fn matches_assoc_mode(&self, is_trait_assoc_item: bool) -> bool {
@@ -476,9 +481,9 @@ use Macro as ItemLikeMacro;
 use Macro as Trait; // overlay namespaces
 //- /b_mod.rs
 struct StructInModB;
-use super::Macro as SuperItemLikeMacro;
-use crate::b_mod::StructInModB as ThisStruct;
-use crate::Trait as IsThisJustATrait;
+pub(self) use super::Macro as SuperItemLikeMacro;
+pub(self) use crate::b_mod::StructInModB as ThisStruct;
+pub(self) use crate::Trait as IsThisJustATrait;
 "#,
     );

@@ -487,7 +492,7 @@ use crate::Trait as IsThisJustATrait;
         .into_iter()
         .map(|module_id| {
             let mut symbols = SymbolCollector::collect_module(&db, module_id);
-            symbols.sort_by_key(|it| it.name.clone());
+            symbols.sort_by_key(|it| it.name.as_str().to_owned());
             (module_id, symbols)
         })
         .collect();
@@ -514,7 +519,7 @@ struct Duplicate;
         .into_iter()
         .map(|module_id| {
             let mut symbols = SymbolCollector::collect_module(&db, module_id);
-            symbols.sort_by_key(|it| it.name.clone());
+            symbols.sort_by_key(|it| it.name.as_str().to_owned());
             (module_id, symbols)
         })
         .collect();
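Query::search is now generic over a ControlFlow return value and uses break_value() to turn an early Break into Some(..), as the search_maps hunk above shows. A small standalone illustration of that standard-library helper, independent of the rust-analyzer types:

use std::ops::ControlFlow;

fn main() {
    // `break_value()` is how the new `search_maps` unwraps an early exit:
    // Break(x) becomes Some(x), Continue(_) becomes None.
    let stop: ControlFlow<&str, ()> = ControlFlow::Break("hit");
    assert_eq!(stop.break_value(), Some("hit"));

    let keep_going: ControlFlow<&str, ()> = ControlFlow::Continue(());
    assert_eq!(keep_going.break_value(), None);
}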

View file

@@ -631,7 +631,7 @@
                     def: Function(
                         Function {
                             id: FunctionId(
-                                3,
+                                2,
                             ),
                         },
                     ),
@@ -664,7 +664,7 @@
                     def: Function(
                         Function {
                             id: FunctionId(
-                                2,
+                                1,
                             ),
                         },
                     ),
@@ -794,7 +794,7 @@
                     def: Function(
                         Function {
                             id: FunctionId(
-                                1,
+                                3,
                             ),
                         },
                     ),
@@ -877,6 +877,37 @@
                 },
             },
             [
+                FileSymbol {
+                    name: "IsThisJustATrait",
+                    def: Trait(
+                        Trait {
+                            id: TraitId(
+                                0,
+                            ),
+                        },
+                    ),
+                    loc: DeclarationLocation {
+                        hir_file_id: EditionedFileId(
+                            FileId(
+                                1,
+                            ),
+                            Edition2021,
+                        ),
+                        ptr: SyntaxNodePtr {
+                            kind: USE_TREE,
+                            range: 141..173,
+                        },
+                        name_ptr: AstPtr(
+                            SyntaxNodePtr {
+                                kind: NAME,
+                                range: 157..173,
+                            },
+                        ),
+                    },
+                    container_name: None,
+                    is_alias: false,
+                    is_assoc: false,
+                },
                 FileSymbol {
                     name: "IsThisJustATrait",
                     def: Macro(
@@ -897,12 +928,12 @@
                         ),
                         ptr: SyntaxNodePtr {
                             kind: USE_TREE,
-                            range: 111..143,
+                            range: 141..173,
                         },
                         name_ptr: AstPtr(
                             SyntaxNodePtr {
                                 kind: NAME,
-                                range: 127..143,
+                                range: 157..173,
                             },
                         ),
                     },
@@ -963,78 +994,12 @@
                         ),
                         ptr: SyntaxNodePtr {
                             kind: USE_TREE,
-                            range: 25..59,
+                            range: 35..69,
                         },
                         name_ptr: AstPtr(
                             SyntaxNodePtr {
                                 kind: NAME,
-                                range: 41..59,
+                                range: 51..69,
-                            },
-                        ),
-                    },
-                    container_name: None,
-                    is_alias: false,
-                    is_assoc: false,
-                },
-                FileSymbol {
-                    name: "ThisStruct",
-                    def: Adt(
-                        Struct(
-                            Struct {
-                                id: StructId(
-                                    4,
-                                ),
-                            },
-                        ),
-                    ),
-                    loc: DeclarationLocation {
-                        hir_file_id: EditionedFileId(
-                            FileId(
-                                1,
-                            ),
-                            Edition2021,
-                        ),
-                        ptr: SyntaxNodePtr {
-                            kind: USE_TREE,
-                            range: 65..105,
-                        },
-                        name_ptr: AstPtr(
-                            SyntaxNodePtr {
-                                kind: NAME,
-                                range: 95..105,
-                            },
-                        ),
-                    },
-                    container_name: None,
-                    is_alias: false,
-                    is_assoc: false,
-                },
-                FileSymbol {
-                    name: "ThisStruct",
-                    def: Adt(
-                        Struct(
-                            Struct {
-                                id: StructId(
-                                    4,
-                                ),
-                            },
-                        ),
-                    ),
-                    loc: DeclarationLocation {
-                        hir_file_id: EditionedFileId(
-                            FileId(
-                                1,
-                            ),
-                            Edition2021,
-                        ),
-                        ptr: SyntaxNodePtr {
-                            kind: USE_TREE,
-                            range: 65..105,
-                        },
-                        name_ptr: AstPtr(
-                            SyntaxNodePtr {
-                                kind: NAME,
-                                range: 95..105,
-                            },
                             },
                         ),
                     },
View file

@@ -44,13 +44,16 @@ pub struct NavigationTarget {
     ///
     /// This range must be contained within [`Self::full_range`].
     pub focus_range: Option<TextRange>,
+    // FIXME: Symbol
     pub name: SmolStr,
     pub kind: Option<SymbolKind>,
+    // FIXME: Symbol
     pub container_name: Option<SmolStr>,
     pub description: Option<String>,
     pub docs: Option<Documentation>,
     /// In addition to a `name` field, a `NavigationTarget` may also be aliased
     /// In such cases we want a `NavigationTarget` to be accessible by its alias
+    // FIXME: Symbol
     pub alias: Option<SmolStr>,
 }

@@ -191,10 +194,10 @@ impl TryToNav for FileSymbol {
         NavigationTarget {
             file_id,
             name: self.is_alias.then(|| self.def.name(db)).flatten().map_or_else(
-                || self.name.clone(),
+                || self.name.as_str().into(),
                 |it| it.display_no_db(edition).to_smolstr(),
             ),
-            alias: self.is_alias.then(|| self.name.clone()),
+            alias: self.is_alias.then(|| self.name.as_str().into()),
             kind: Some(self.def.into()),
             full_range,
             focus_range,