Merge pull request #18967 from Veykril/push-pwonkmwqmmol
Properly record meaningful imports as re-exports in symbol index
Commit 1eb9d15e42
25 changed files with 382 additions and 279 deletions
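At a glance: trait and impl item lists now carry their names (`Box<[(Name, AssocItemId)]>`), `ItemScope` exposes its `types`/`values`/`macros` maps, and the symbol collector walks those maps so that explicit re-exports (public or explicitly scoped `use`/`extern crate` items, plus imports in the crate root) land in the symbol index alongside declarations. A minimal, self-contained sketch of that indexing rule follows; the types below are simplified stand-ins, not rust-analyzer's actual API.

// Simplified stand-ins; not rust-analyzer's real types.
#[derive(Clone, Copy)]
enum Visibility {
    Public,
    // `explicit` is true when the module visibility was spelled out (`pub(crate)`, ...).
    Module { explicit: bool },
}

struct ScopeEntry {
    name: &'static str,
    vis: Visibility,
    // Came from a `use`/`extern crate` rather than a local declaration.
    is_import: bool,
}

/// Declarations are always indexed; imports only when they are meaningful
/// re-exports: public, explicitly scoped, or sitting in the crate root.
fn should_index(entry: &ScopeEntry, in_crate_root: bool) -> bool {
    if !entry.is_import {
        return true;
    }
    match entry.vis {
        Visibility::Public => true,
        Visibility::Module { explicit } => explicit || in_crate_root,
    }
}

fn main() {
    let entries = [
        ScopeEntry { name: "LocalStruct", vis: Visibility::Module { explicit: false }, is_import: false },
        ScopeEntry { name: "ReExported", vis: Visibility::Public, is_import: true },
        ScopeEntry { name: "PrivateImport", vis: Visibility::Module { explicit: false }, is_import: true },
    ];
    for entry in &entries {
        println!("{}: indexed = {}", entry.name, should_index(entry, false));
    }
}
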
@@ -523,6 +523,7 @@ dependencies = [
 "hir-def",
 "hir-expand",
 "hir-ty",
 "indexmap",
 "intern",
 "itertools",
 "rustc-hash 2.0.0",

@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
resolver = "2"

[workspace.package]
rust-version = "1.82"
rust-version = "1.83"
edition = "2021"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]

@@ -244,7 +244,7 @@ bitflags::bitflags! {
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
    pub name: Name,
    pub items: Vec<(Name, AssocItemId)>,
    pub items: Box<[(Name, AssocItemId)]>,
    pub flags: TraitFlags,
    pub visibility: RawVisibility,
    // box it as the vec is usually empty anyways

@@ -360,7 +360,7 @@ impl TraitAliasData {
pub struct ImplData {
    pub target_trait: Option<TraitRef>,
    pub self_ty: TypeRefId,
    pub items: Box<[AssocItemId]>,
    pub items: Box<[(Name, AssocItemId)]>,
    pub is_negative: bool,
    pub is_unsafe: bool,
    // box it as the vec is usually empty anyways

@@ -393,7 +393,6 @@ impl ImplData {
        collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);

        let (items, macro_calls, diagnostics) = collector.finish();
        let items = items.into_iter().map(|(_, item)| item).collect();

        (
            Arc::new(ImplData {

@@ -648,12 +647,12 @@ impl<'a> AssocItemCollector<'a> {
    fn finish(
        self,
    ) -> (
        Vec<(Name, AssocItemId)>,
        Box<[(Name, AssocItemId)]>,
        Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
        Vec<DefDiagnostic>,
    ) {
        (
            self.items,
            self.items.into_boxed_slice(),
            if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) },
            self.diagnostics,
        )

@@ -162,6 +162,20 @@ impl ItemScope {
            .map(move |name| (name, self.get(name)))
    }

    pub fn values(&self) -> impl Iterator<Item = (&Name, Item<ModuleDefId, ImportId>)> + '_ {
        self.values.iter().map(|(n, &i)| (n, i))
    }

    pub fn types(
        &self,
    ) -> impl Iterator<Item = (&Name, Item<ModuleDefId, ImportOrExternCrate>)> + '_ {
        self.types.iter().map(|(n, &i)| (n, i))
    }

    pub fn macros(&self) -> impl Iterator<Item = (&Name, Item<MacroId, ImportId>)> + '_ {
        self.macros.iter().map(|(n, &i)| (n, i))
    }

    pub fn imports(&self) -> impl Iterator<Item = ImportId> + '_ {
        self.use_imports_types
            .keys()

@@ -263,11 +277,6 @@ impl ItemScope {
        self.unnamed_consts.iter().copied()
    }

    /// Iterate over all module scoped macros
    pub(crate) fn macros(&self) -> impl Iterator<Item = (&Name, MacroId)> + '_ {
        self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
    }

    /// Iterate over all legacy textual scoped macros visible at the end of the module
    pub fn legacy_macros(&self) -> impl Iterator<Item = (&Name, &[MacroId])> + '_ {
        self.legacy_macros.iter().map(|(name, def)| (name, &**def))

@@ -107,7 +107,7 @@ impl LangItems {
        for (_, module_data) in crate_def_map.modules() {
            for impl_def in module_data.scope.impls() {
                lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
                for assoc in db.impl_data(impl_def).items.iter().copied() {
                for &(_, assoc) in db.impl_data(impl_def).items.iter() {
                    match assoc {
                        AssocItemId::FunctionId(f) => {
                            lang_items.collect_lang_item(db, f, LangItemTarget::Function)

@@ -502,7 +502,7 @@ impl ModuleId {
    }

    /// Whether this module represents the crate root module
    fn is_crate_root(&self) -> bool {
    pub fn is_crate_root(&self) -> bool {
        self.local_id == DefMap::ROOT && self.block.is_none()
    }
}

@@ -717,8 +717,8 @@ impl DefCollector<'_> {
        }
    }
    None => {
        for (name, def) in root_scope.macros() {
            self.def_map.macro_use_prelude.insert(name.clone(), (def, extern_crate));
        for (name, it) in root_scope.macros() {
            self.def_map.macro_use_prelude.insert(name.clone(), (it.def, extern_crate));
        }
    }
}

@@ -240,12 +240,12 @@ impl Visibility {

        if a_ancestors.any(|m| m == mod_b.local_id) {
            // B is above A
            return Some(Visibility::Module(mod_a, expl_b));
            return Some(Visibility::Module(mod_a, expl_a));
        }

        if b_ancestors.any(|m| m == mod_a.local_id) {
            // A is above B
            return Some(Visibility::Module(mod_b, expl_a));
            return Some(Visibility::Module(mod_b, expl_b));
        }

        None

@@ -11,7 +11,7 @@ use syntax::utils::is_raw_identifier;
/// and declarations. In theory, names should also carry hygiene info, but we are
/// not there yet!
///
/// Note that the rawness (`r#`) of names does not depend on whether they are written raw.
/// Note that the rawness (`r#`) of names is not preserved. Names are always stored without a `r#` prefix.
/// This is because we want to show (in completions etc.) names as raw depending on the needs
/// of the current crate, for example if it is edition 2021 complete `gen` even if the defining
/// crate is in edition 2024 and wrote `r#gen`, and the opposite holds as well.

@@ -77,6 +77,7 @@ impl Name {
    /// Hopefully, this should allow us to integrate hygiene cleaner in the
    /// future, and to switch to interned representation of names.
    fn new_text(text: &str) -> Name {
        debug_assert!(!text.starts_with("r#"));
        Name { symbol: Symbol::intern(text), ctx: () }
    }

@@ -91,15 +92,34 @@ impl Name {

    pub fn new_root(text: &str) -> Name {
        // The edition doesn't matter for hygiene.
        Self::new(text, SyntaxContextId::root(Edition::Edition2015))
        Self::new(text.trim_start_matches("r#"), SyntaxContextId::root(Edition::Edition2015))
    }

    pub fn new_tuple_field(idx: usize) -> Name {
        Name { symbol: Symbol::intern(&idx.to_string()), ctx: () }
        let symbol = match idx {
            0 => sym::INTEGER_0.clone(),
            1 => sym::INTEGER_1.clone(),
            2 => sym::INTEGER_2.clone(),
            3 => sym::INTEGER_3.clone(),
            4 => sym::INTEGER_4.clone(),
            5 => sym::INTEGER_5.clone(),
            6 => sym::INTEGER_6.clone(),
            7 => sym::INTEGER_7.clone(),
            8 => sym::INTEGER_8.clone(),
            9 => sym::INTEGER_9.clone(),
            10 => sym::INTEGER_10.clone(),
            11 => sym::INTEGER_11.clone(),
            12 => sym::INTEGER_12.clone(),
            13 => sym::INTEGER_13.clone(),
            14 => sym::INTEGER_14.clone(),
            15 => sym::INTEGER_15.clone(),
            _ => Symbol::intern(&idx.to_string()),
        };
        Name { symbol, ctx: () }
    }

    pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
        Name { symbol: Symbol::intern(lt.text().as_str()), ctx: () }
        Self::new_text(lt.text().as_str().trim_start_matches("r#"))
    }

    /// Resolve a name from the text of token.

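Side note on the `new_tuple_field` change above: small indices reuse pre-interned symbols instead of interning a fresh string on every call. A rough, self-contained illustration of that fast-path pattern, with plain string slices standing in for interned `Symbol`s:

// Plain &str values stand in for pre-interned `Symbol`s (`sym::INTEGER_0`, ...).
use std::borrow::Cow;

const SMALL_INDICES: [&str; 16] = [
    "0", "1", "2", "3", "4", "5", "6", "7",
    "8", "9", "10", "11", "12", "13", "14", "15",
];

fn tuple_field_name(idx: usize) -> Cow<'static, str> {
    match SMALL_INDICES.get(idx) {
        // Common small indices avoid allocating a new string each time.
        Some(&s) => Cow::Borrowed(s),
        None => Cow::Owned(idx.to_string()),
    }
}

fn main() {
    assert_eq!(tuple_field_name(3), "3");
    assert_eq!(tuple_field_name(42), "42");
}
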
@@ -142,15 +162,18 @@ impl Name {
    }

    /// Returns the text this name represents if it isn't a tuple field.
    ///
    /// Do not use this for user-facing text, use `display` instead to handle editions properly.
    pub fn as_str(&self) -> &str {
        self.symbol.as_str()
    }

    // FIXME: Remove this
    pub fn unescaped(&self) -> UnescapedName<'_> {
        UnescapedName(self)
    }

    pub fn is_escaped(&self, edition: Edition) -> bool {
    pub fn needs_escape(&self, edition: Edition) -> bool {
        is_raw_identifier(self.symbol.as_str(), edition)
    }

@@ -173,16 +196,19 @@ impl Name {
        &self.symbol
    }

    pub const fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
    pub fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
        debug_assert!(!symbol.as_str().starts_with("r#"));
        _ = ctx;
        Self { symbol, ctx: () }
    }

    // FIXME: This needs to go once we have hygiene
    pub const fn new_symbol_root(sym: Symbol) -> Self {
    pub fn new_symbol_root(sym: Symbol) -> Self {
        debug_assert!(!sym.as_str().starts_with("r#"));
        Self { symbol: sym, ctx: () }
    }

    // FIXME: Remove this
    #[inline]
    pub fn eq_ident(&self, ident: &str) -> bool {
        self.as_str() == ident.trim_start_matches("r#")

@ -856,7 +856,7 @@ fn impl_def_datum(
|
|||
let associated_ty_value_ids = impl_data
|
||||
.items
|
||||
.iter()
|
||||
.filter_map(|item| match item {
|
||||
.filter_map(|(_, item)| match item {
|
||||
AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
|
||||
_ => None,
|
||||
})
|
||||
|
|
|
@ -746,16 +746,9 @@ fn lookup_impl_assoc_item_for_trait_ref(
|
|||
let table = InferenceTable::new(db, env);
|
||||
|
||||
let (impl_data, impl_subst) = find_matching_impl(impls, table, trait_ref)?;
|
||||
let item = impl_data.items.iter().find_map(|&it| match it {
|
||||
AssocItemId::FunctionId(f) => {
|
||||
(db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f))
|
||||
}
|
||||
AssocItemId::ConstId(c) => db
|
||||
.const_data(c)
|
||||
.name
|
||||
.as_ref()
|
||||
.map(|n| n == name)
|
||||
.and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }),
|
||||
let item = impl_data.items.iter().find_map(|(n, it)| match *it {
|
||||
AssocItemId::FunctionId(f) => (n == name).then_some(AssocItemId::FunctionId(f)),
|
||||
AssocItemId::ConstId(c) => (n == name).then_some(AssocItemId::ConstId(c)),
|
||||
AssocItemId::TypeAliasId(_) => None,
|
||||
})?;
|
||||
Some((item, impl_subst))
|
||||
|
@ -850,7 +843,7 @@ fn is_inherent_impl_coherent(
|
|||
};
|
||||
rustc_has_incoherent_inherent_impls
|
||||
&& !impl_data.items.is_empty()
|
||||
&& impl_data.items.iter().copied().all(|assoc| match assoc {
|
||||
&& impl_data.items.iter().all(|&(_, assoc)| match assoc {
|
||||
AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
|
||||
AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
|
||||
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
|
||||
|
@ -1399,7 +1392,7 @@ fn iterate_inherent_methods(
|
|||
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
|
||||
) -> ControlFlow<()> {
|
||||
for &impl_id in impls.for_self_ty(self_ty) {
|
||||
for &item in table.db.impl_data(impl_id).items.iter() {
|
||||
for &(ref item_name, item) in table.db.impl_data(impl_id).items.iter() {
|
||||
let visible = match is_valid_impl_method_candidate(
|
||||
table,
|
||||
self_ty,
|
||||
|
@ -1408,6 +1401,7 @@ fn iterate_inherent_methods(
|
|||
name,
|
||||
impl_id,
|
||||
item,
|
||||
item_name,
|
||||
) {
|
||||
IsValidCandidate::Yes => true,
|
||||
IsValidCandidate::NotVisible => false,
|
||||
|
@ -1467,6 +1461,7 @@ fn is_valid_impl_method_candidate(
|
|||
name: Option<&Name>,
|
||||
impl_id: ImplId,
|
||||
item: AssocItemId,
|
||||
item_name: &Name,
|
||||
) -> IsValidCandidate {
|
||||
match item {
|
||||
AssocItemId::FunctionId(f) => is_valid_impl_fn_candidate(
|
||||
|
@ -1477,11 +1472,12 @@ fn is_valid_impl_method_candidate(
|
|||
receiver_ty,
|
||||
self_ty,
|
||||
visible_from_module,
|
||||
item_name,
|
||||
),
|
||||
AssocItemId::ConstId(c) => {
|
||||
let db = table.db;
|
||||
check_that!(receiver_ty.is_none());
|
||||
check_that!(name.is_none_or(|n| db.const_data(c).name.as_ref() == Some(n)));
|
||||
check_that!(name.is_none_or(|n| n == item_name));
|
||||
|
||||
if let Some(from_module) = visible_from_module {
|
||||
if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) {
|
||||
|
@ -1565,11 +1561,13 @@ fn is_valid_impl_fn_candidate(
|
|||
receiver_ty: Option<&Ty>,
|
||||
self_ty: &Ty,
|
||||
visible_from_module: Option<ModuleId>,
|
||||
item_name: &Name,
|
||||
) -> IsValidCandidate {
|
||||
check_that!(name.is_none_or(|n| n == item_name));
|
||||
|
||||
let db = table.db;
|
||||
let data = db.function_data(fn_id);
|
||||
|
||||
check_that!(name.is_none_or(|n| n == &data.name));
|
||||
if let Some(from_module) = visible_from_module {
|
||||
if !db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) {
|
||||
cov_mark::hit!(autoderef_candidate_not_visible);
|
||||
|
|
|
@ -435,7 +435,7 @@ pub(crate) fn visit_module(
|
|||
visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
|
||||
for impl_id in crate_def_map[module_id].scope.impls() {
|
||||
let impl_data = db.impl_data(impl_id);
|
||||
for &item in impl_data.items.iter() {
|
||||
for &(_, item) in impl_data.items.iter() {
|
||||
match item {
|
||||
AssocItemId::FunctionId(it) => {
|
||||
let body = db.body(it.into());
|
||||
|
|
|
@ -20,6 +20,7 @@ itertools.workspace = true
|
|||
smallvec.workspace = true
|
||||
tracing.workspace = true
|
||||
triomphe.workspace = true
|
||||
indexmap.workspace = true
|
||||
|
||||
# local deps
|
||||
base-db.workspace = true
|
||||
|
|
|
@ -775,29 +775,16 @@ impl Module {
|
|||
AssocItemId::ConstId(id) => !db.const_data(id).has_body,
|
||||
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
|
||||
});
|
||||
impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
|
||||
|&item| {
|
||||
Some((
|
||||
item,
|
||||
match item {
|
||||
AssocItemId::FunctionId(it) => db.function_data(it).name.clone(),
|
||||
AssocItemId::ConstId(it) => {
|
||||
db.const_data(it).name.as_ref()?.clone()
|
||||
}
|
||||
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).name.clone(),
|
||||
},
|
||||
))
|
||||
},
|
||||
));
|
||||
impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().cloned());
|
||||
|
||||
let redundant = impl_assoc_items_scratch
|
||||
.iter()
|
||||
.filter(|(id, name)| {
|
||||
.filter(|(name, id)| {
|
||||
!items.iter().any(|(impl_name, impl_item)| {
|
||||
discriminant(impl_item) == discriminant(id) && impl_name == name
|
||||
})
|
||||
})
|
||||
.map(|(item, name)| (name.clone(), AssocItem::from(*item)));
|
||||
.map(|(name, item)| (name.clone(), AssocItem::from(*item)));
|
||||
for (name, assoc_item) in redundant {
|
||||
acc.push(
|
||||
TraitImplRedundantAssocItems {
|
||||
|
@ -812,7 +799,7 @@ impl Module {
|
|||
|
||||
let missing: Vec<_> = required_items
|
||||
.filter(|(name, id)| {
|
||||
!impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
|
||||
!impl_assoc_items_scratch.iter().any(|(impl_name, impl_item)| {
|
||||
discriminant(impl_item) == discriminant(id) && impl_name == name
|
||||
})
|
||||
})
|
||||
|
@ -844,7 +831,7 @@ impl Module {
|
|||
source_map,
|
||||
);
|
||||
|
||||
for &item in db.impl_data(impl_def.id).items.iter() {
|
||||
for &(_, item) in db.impl_data(impl_def.id).items.iter() {
|
||||
AssocItem::from(item).diagnostics(db, acc, style_lints);
|
||||
}
|
||||
}
|
||||
|
@ -4307,7 +4294,7 @@ impl Impl {
|
|||
}
|
||||
|
||||
pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
|
||||
db.impl_data(self.id).items.iter().map(|&it| it.into()).collect()
|
||||
db.impl_data(self.id).items.iter().map(|&(_, it)| it.into()).collect()
|
||||
}
|
||||
|
||||
pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
|
||||
|
@ -5165,7 +5152,7 @@ impl Type {
|
|||
let impls = db.inherent_impls_in_crate(krate);
|
||||
|
||||
for impl_def in impls.for_self_ty(&self.ty) {
|
||||
for &item in db.impl_data(*impl_def).items.iter() {
|
||||
for &(_, item) in db.impl_data(*impl_def).items.iter() {
|
||||
if callback(item) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -39,8 +39,8 @@ use stdx::TupleExt;
|
|||
use syntax::{
|
||||
algo::skip_trivia_token,
|
||||
ast::{self, HasAttrs as _, HasGenericParams},
|
||||
AstNode, AstToken, Direction, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
|
||||
TextRange, TextSize,
|
||||
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
|
||||
TextSize,
|
||||
};
|
||||
use triomphe::Arc;
|
||||
|
||||
|
@ -1591,14 +1591,11 @@ impl<'db> SemanticsImpl<'db> {
|
|||
pub fn resolve_mod_path_relative(
|
||||
&self,
|
||||
to: Module,
|
||||
segments: impl IntoIterator<Item = SmolStr>,
|
||||
segments: impl IntoIterator<Item = Name>,
|
||||
) -> Option<impl Iterator<Item = ItemInNs>> {
|
||||
let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items(
|
||||
self.db.upcast(),
|
||||
&ModPath::from_segments(
|
||||
hir_def::path::PathKind::Plain,
|
||||
segments.into_iter().map(|it| Name::new_root(&it)),
|
||||
),
|
||||
&ModPath::from_segments(hir_def::path::PathKind::Plain, segments),
|
||||
);
|
||||
Some(items.iter_items().map(|(item, _)| item.into()))
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@ impl ChildBySource for ImplId {
|
|||
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
|
||||
},
|
||||
);
|
||||
data.items.iter().for_each(|&item| {
|
||||
data.items.iter().for_each(|&(_, item)| {
|
||||
add_assoc_item(db, res, file_id, item);
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,27 +1,34 @@
|
|||
//! File symbol extraction.
|
||||
|
||||
use either::Either;
|
||||
use hir_def::{
|
||||
db::DefDatabase,
|
||||
item_scope::ItemInNs,
|
||||
item_scope::{ImportId, ImportOrExternCrate},
|
||||
per_ns::Item,
|
||||
src::{HasChildSource, HasSource},
|
||||
AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
|
||||
TraitId,
|
||||
visibility::{Visibility, VisibilityExplicitness},
|
||||
AdtId, AssocItemId, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
|
||||
ModuleDefId, ModuleId, TraitId,
|
||||
};
|
||||
use hir_expand::HirFileId;
|
||||
use hir_expand::{name::Name, HirFileId};
|
||||
use hir_ty::{
|
||||
db::HirDatabase,
|
||||
display::{hir_display_with_types_map, HirDisplay},
|
||||
};
|
||||
use intern::Symbol;
|
||||
use rustc_hash::FxHashMap;
|
||||
use span::Edition;
|
||||
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};
|
||||
|
||||
use crate::{Module, ModuleDef, Semantics};
|
||||
|
||||
pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
|
||||
|
||||
/// The actual data that is stored in the index. It should be as compact as
|
||||
/// possible.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct FileSymbol {
|
||||
pub name: SmolStr,
|
||||
pub name: Symbol,
|
||||
pub def: ModuleDef,
|
||||
pub loc: DeclarationLocation,
|
||||
pub container_name: Option<SmolStr>,
|
||||
|
@ -37,7 +44,7 @@ pub struct DeclarationLocation {
|
|||
/// This points to the whole syntax node of the declaration.
|
||||
pub ptr: SyntaxNodePtr,
|
||||
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
|
||||
pub name_ptr: AstPtr<syntax::ast::Name>,
|
||||
pub name_ptr: AstPtr<Either<syntax::ast::Name, syntax::ast::NameRef>>,
|
||||
}
|
||||
|
||||
impl DeclarationLocation {
|
||||
|
@ -55,7 +62,7 @@ struct SymbolCollectorWork {
|
|||
|
||||
pub struct SymbolCollector<'a> {
|
||||
db: &'a dyn HirDatabase,
|
||||
symbols: Vec<FileSymbol>,
|
||||
symbols: FxIndexSet<FileSymbol>,
|
||||
work: Vec<SymbolCollectorWork>,
|
||||
current_container_name: Option<SmolStr>,
|
||||
edition: Edition,
|
||||
|
@ -86,11 +93,11 @@ impl<'a> SymbolCollector<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn finish(self) -> Vec<FileSymbol> {
|
||||
self.symbols
|
||||
pub fn finish(self) -> Box<[FileSymbol]> {
|
||||
self.symbols.into_iter().collect()
|
||||
}
|
||||
|
||||
pub fn collect_module(db: &dyn HirDatabase, module: Module) -> Vec<FileSymbol> {
|
||||
pub fn collect_module(db: &dyn HirDatabase, module: Module) -> Box<[FileSymbol]> {
|
||||
let mut symbol_collector = SymbolCollector::new(db);
|
||||
symbol_collector.collect(module);
|
||||
symbol_collector.finish()
|
||||
|
@ -104,96 +111,174 @@ impl<'a> SymbolCollector<'a> {
|
|||
}
|
||||
|
||||
fn collect_from_module(&mut self, module_id: ModuleId) {
|
||||
let def_map = module_id.def_map(self.db.upcast());
|
||||
let scope = &def_map[module_id.local_id].scope;
|
||||
|
||||
for module_def_id in scope.declarations() {
|
||||
match module_def_id {
|
||||
ModuleDefId::ModuleId(id) => self.push_module(id),
|
||||
let push_decl = |this: &mut Self, def, name| {
|
||||
match def {
|
||||
ModuleDefId::ModuleId(id) => this.push_module(id, name),
|
||||
ModuleDefId::FunctionId(id) => {
|
||||
self.push_decl(id, false);
|
||||
self.collect_from_body(id);
|
||||
this.push_decl(id, name, false);
|
||||
this.collect_from_body(id);
|
||||
}
|
||||
ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
|
||||
ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
|
||||
ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
|
||||
ModuleDefId::AdtId(AdtId::StructId(id)) => this.push_decl(id, name, false),
|
||||
ModuleDefId::AdtId(AdtId::EnumId(id)) => this.push_decl(id, name, false),
|
||||
ModuleDefId::AdtId(AdtId::UnionId(id)) => this.push_decl(id, name, false),
|
||||
ModuleDefId::ConstId(id) => {
|
||||
self.push_decl(id, false);
|
||||
self.collect_from_body(id);
|
||||
this.push_decl(id, name, false);
|
||||
this.collect_from_body(id);
|
||||
}
|
||||
ModuleDefId::StaticId(id) => {
|
||||
self.push_decl(id, false);
|
||||
self.collect_from_body(id);
|
||||
this.push_decl(id, name, false);
|
||||
this.collect_from_body(id);
|
||||
}
|
||||
ModuleDefId::TraitId(id) => {
|
||||
self.push_decl(id, false);
|
||||
self.collect_from_trait(id);
|
||||
this.push_decl(id, name, false);
|
||||
this.collect_from_trait(id);
|
||||
}
|
||||
ModuleDefId::TraitAliasId(id) => {
|
||||
self.push_decl(id, false);
|
||||
this.push_decl(id, name, false);
|
||||
}
|
||||
ModuleDefId::TypeAliasId(id) => {
|
||||
self.push_decl(id, false);
|
||||
this.push_decl(id, name, false);
|
||||
}
|
||||
ModuleDefId::MacroId(id) => match id {
|
||||
MacroId::Macro2Id(id) => self.push_decl(id, false),
|
||||
MacroId::MacroRulesId(id) => self.push_decl(id, false),
|
||||
MacroId::ProcMacroId(id) => self.push_decl(id, false),
|
||||
MacroId::Macro2Id(id) => this.push_decl(id, name, false),
|
||||
MacroId::MacroRulesId(id) => this.push_decl(id, name, false),
|
||||
MacroId::ProcMacroId(id) => this.push_decl(id, name, false),
|
||||
},
|
||||
// Don't index these.
|
||||
ModuleDefId::BuiltinType(_) => {}
|
||||
ModuleDefId::EnumVariantId(_) => {}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Nested trees are very common, so a cache here will hit a lot.
|
||||
let import_child_source_cache = &mut FxHashMap::default();
|
||||
|
||||
let mut push_import = |this: &mut Self, i: ImportId, name: &Name, def: ModuleDefId| {
|
||||
let source = import_child_source_cache
|
||||
.entry(i.import)
|
||||
.or_insert_with(|| i.import.child_source(this.db.upcast()));
|
||||
let Some(use_tree_src) = source.value.get(i.idx) else { return };
|
||||
let Some(name_ptr) = use_tree_src
|
||||
.rename()
|
||||
.and_then(|rename| rename.name())
|
||||
.map(Either::Left)
|
||||
.or_else(|| use_tree_src.path()?.segment()?.name_ref().map(Either::Right))
|
||||
.map(|it| AstPtr::new(&it))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let dec_loc = DeclarationLocation {
|
||||
hir_file_id: source.file_id,
|
||||
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
|
||||
name_ptr,
|
||||
};
|
||||
this.symbols.insert(FileSymbol {
|
||||
name: name.symbol().clone(),
|
||||
def: def.into(),
|
||||
container_name: this.current_container_name.clone(),
|
||||
loc: dec_loc,
|
||||
is_alias: false,
|
||||
is_assoc: false,
|
||||
});
|
||||
};
|
||||
|
||||
let push_extern_crate =
|
||||
|this: &mut Self, i: ExternCrateId, name: &Name, def: ModuleDefId| {
|
||||
let loc = i.lookup(this.db.upcast());
|
||||
let source = loc.source(this.db.upcast());
|
||||
let Some(name_ptr) = source
|
||||
.value
|
||||
.rename()
|
||||
.and_then(|rename| rename.name())
|
||||
.map(Either::Left)
|
||||
.or_else(|| source.value.name_ref().map(Either::Right))
|
||||
.map(|it| AstPtr::new(&it))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let dec_loc = DeclarationLocation {
|
||||
hir_file_id: source.file_id,
|
||||
ptr: SyntaxNodePtr::new(source.value.syntax()),
|
||||
name_ptr,
|
||||
};
|
||||
this.symbols.insert(FileSymbol {
|
||||
name: name.symbol().clone(),
|
||||
def: def.into(),
|
||||
container_name: this.current_container_name.clone(),
|
||||
loc: dec_loc,
|
||||
is_alias: false,
|
||||
is_assoc: false,
|
||||
});
|
||||
};
|
||||
|
||||
let is_explicit_import = |vis| {
|
||||
match vis {
|
||||
Visibility::Module(_, VisibilityExplicitness::Explicit) => true,
|
||||
Visibility::Module(_, VisibilityExplicitness::Implicit) => {
|
||||
// consider imports in the crate root explicit, as these are visibly
|
||||
// crate-wide anyways
|
||||
module_id.is_crate_root()
|
||||
}
|
||||
Visibility::Public => true,
|
||||
}
|
||||
};
|
||||
|
||||
let def_map = module_id.def_map(self.db.upcast());
|
||||
let scope = &def_map[module_id.local_id].scope;
|
||||
|
||||
for impl_id in scope.impls() {
|
||||
self.collect_from_impl(impl_id);
|
||||
}
|
||||
|
||||
// Record renamed imports.
|
||||
// FIXME: In case it imports multiple items under different namespaces we just pick one arbitrarily
|
||||
// for now.
|
||||
for id in scope.imports() {
|
||||
let source = id.import.child_source(self.db.upcast());
|
||||
let Some(use_tree_src) = source.value.get(id.idx) else { continue };
|
||||
let Some(rename) = use_tree_src.rename() else { continue };
|
||||
let Some(name) = rename.name() else { continue };
|
||||
|
||||
let res = scope.fully_resolve_import(self.db.upcast(), id);
|
||||
res.iter_items().for_each(|(item, _)| {
|
||||
let def = match item {
|
||||
ItemInNs::Types(def) | ItemInNs::Values(def) => def,
|
||||
ItemInNs::Macros(def) => ModuleDefId::from(def),
|
||||
for (name, Item { def, vis, import }) in scope.types() {
|
||||
if let Some(i) = import {
|
||||
if is_explicit_import(vis) {
|
||||
match i {
|
||||
ImportOrExternCrate::Import(i) => push_import(self, i, name, def),
|
||||
ImportOrExternCrate::ExternCrate(i) => {
|
||||
push_extern_crate(self, i, name, def)
|
||||
}
|
||||
}
|
||||
}
|
||||
.into();
|
||||
let dec_loc = DeclarationLocation {
|
||||
hir_file_id: source.file_id,
|
||||
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
|
||||
name_ptr: AstPtr::new(&name),
|
||||
};
|
||||
continue;
|
||||
}
|
||||
// self is a declaration
|
||||
push_decl(self, def, name)
|
||||
}
|
||||
|
||||
self.symbols.push(FileSymbol {
|
||||
name: name.text().into(),
|
||||
def,
|
||||
container_name: self.current_container_name.clone(),
|
||||
loc: dec_loc,
|
||||
is_alias: false,
|
||||
is_assoc: false,
|
||||
});
|
||||
});
|
||||
for (name, Item { def, vis, import }) in scope.macros() {
|
||||
if let Some(i) = import {
|
||||
if is_explicit_import(vis) {
|
||||
push_import(self, i, name, def.into());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// self is a declaration
|
||||
push_decl(self, def.into(), name)
|
||||
}
|
||||
|
||||
for (name, Item { def, vis, import }) in scope.values() {
|
||||
if let Some(i) = import {
|
||||
if is_explicit_import(vis) {
|
||||
push_import(self, i, name, def);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// self is a declaration
|
||||
push_decl(self, def, name)
|
||||
}
|
||||
|
||||
for const_id in scope.unnamed_consts() {
|
||||
self.collect_from_body(const_id);
|
||||
}
|
||||
|
||||
for (_, id) in scope.legacy_macros() {
|
||||
for (name, id) in scope.legacy_macros() {
|
||||
for &id in id {
|
||||
if id.module(self.db.upcast()) == module_id {
|
||||
match id {
|
||||
MacroId::Macro2Id(id) => self.push_decl(id, false),
|
||||
MacroId::MacroRulesId(id) => self.push_decl(id, false),
|
||||
MacroId::ProcMacroId(id) => self.push_decl(id, false),
|
||||
MacroId::Macro2Id(id) => self.push_decl(id, name, false),
|
||||
MacroId::MacroRulesId(id) => self.push_decl(id, name, false),
|
||||
MacroId::ProcMacroId(id) => self.push_decl(id, name, false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -223,8 +308,8 @@ impl<'a> SymbolCollector<'a> {
|
|||
.to_smolstr(),
|
||||
);
|
||||
self.with_container_name(impl_name, |s| {
|
||||
for &assoc_item_id in impl_data.items.iter() {
|
||||
s.push_assoc_item(assoc_item_id)
|
||||
for &(ref name, assoc_item_id) in &impl_data.items {
|
||||
s.push_assoc_item(assoc_item_id, name)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -232,8 +317,8 @@ impl<'a> SymbolCollector<'a> {
|
|||
fn collect_from_trait(&mut self, trait_id: TraitId) {
|
||||
let trait_data = self.db.trait_data(trait_id);
|
||||
self.with_container_name(Some(trait_data.name.as_str().into()), |s| {
|
||||
for &(_, assoc_item_id) in &trait_data.items {
|
||||
s.push_assoc_item(assoc_item_id);
|
||||
for &(ref name, assoc_item_id) in &trait_data.items {
|
||||
s.push_assoc_item(assoc_item_id, name);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -266,15 +351,15 @@ impl<'a> SymbolCollector<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
|
||||
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId, name: &Name) {
|
||||
match assoc_item_id {
|
||||
AssocItemId::FunctionId(id) => self.push_decl(id, true),
|
||||
AssocItemId::ConstId(id) => self.push_decl(id, true),
|
||||
AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
|
||||
AssocItemId::FunctionId(id) => self.push_decl(id, name, true),
|
||||
AssocItemId::ConstId(id) => self.push_decl(id, name, true),
|
||||
AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true),
|
||||
}
|
||||
}
|
||||
|
||||
fn push_decl<'db, L>(&mut self, id: L, is_assoc: bool)
|
||||
fn push_decl<'db, L>(&mut self, id: L, name: &Name, is_assoc: bool)
|
||||
where
|
||||
L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>,
|
||||
<L as Lookup>::Data: HasSource,
|
||||
|
@ -287,13 +372,13 @@ impl<'a> SymbolCollector<'a> {
|
|||
let dec_loc = DeclarationLocation {
|
||||
hir_file_id: source.file_id,
|
||||
ptr: SyntaxNodePtr::new(source.value.syntax()),
|
||||
name_ptr: AstPtr::new(&name_node),
|
||||
name_ptr: AstPtr::new(&name_node).wrap_left(),
|
||||
};
|
||||
|
||||
if let Some(attrs) = def.attrs(self.db) {
|
||||
for alias in attrs.doc_aliases() {
|
||||
self.symbols.push(FileSymbol {
|
||||
name: alias.as_str().into(),
|
||||
self.symbols.insert(FileSymbol {
|
||||
name: alias.clone(),
|
||||
def,
|
||||
loc: dec_loc.clone(),
|
||||
container_name: self.current_container_name.clone(),
|
||||
|
@ -303,8 +388,8 @@ impl<'a> SymbolCollector<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
self.symbols.push(FileSymbol {
|
||||
name: name_node.text().into(),
|
||||
self.symbols.insert(FileSymbol {
|
||||
name: name.symbol().clone(),
|
||||
def,
|
||||
container_name: self.current_container_name.clone(),
|
||||
loc: dec_loc,
|
||||
|
@ -313,7 +398,7 @@ impl<'a> SymbolCollector<'a> {
|
|||
});
|
||||
}
|
||||
|
||||
fn push_module(&mut self, module_id: ModuleId) {
|
||||
fn push_module(&mut self, module_id: ModuleId, name: &Name) {
|
||||
let def_map = module_id.def_map(self.db.upcast());
|
||||
let module_data = &def_map[module_id.local_id];
|
||||
let Some(declaration) = module_data.origin.declaration() else { return };
|
||||
|
@ -322,15 +407,15 @@ impl<'a> SymbolCollector<'a> {
|
|||
let dec_loc = DeclarationLocation {
|
||||
hir_file_id: declaration.file_id,
|
||||
ptr: SyntaxNodePtr::new(module.syntax()),
|
||||
name_ptr: AstPtr::new(&name_node),
|
||||
name_ptr: AstPtr::new(&name_node).wrap_left(),
|
||||
};
|
||||
|
||||
let def = ModuleDef::Module(module_id.into());
|
||||
|
||||
if let Some(attrs) = def.attrs(self.db) {
|
||||
for alias in attrs.doc_aliases() {
|
||||
self.symbols.push(FileSymbol {
|
||||
name: alias.as_str().into(),
|
||||
self.symbols.insert(FileSymbol {
|
||||
name: alias.clone(),
|
||||
def,
|
||||
loc: dec_loc.clone(),
|
||||
container_name: self.current_container_name.clone(),
|
||||
|
@ -340,8 +425,8 @@ impl<'a> SymbolCollector<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
self.symbols.push(FileSymbol {
|
||||
name: name_node.text().into(),
|
||||
self.symbols.insert(FileSymbol {
|
||||
name: name.symbol().clone(),
|
||||
def: ModuleDef::Module(module_id.into()),
|
||||
container_name: self.current_container_name.clone(),
|
||||
loc: dec_loc,
|
||||
|
|
|
@ -5,7 +5,7 @@ use ide_db::imports::{
|
|||
insert_use::ImportScope,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use syntax::{ast, AstNode, SyntaxNode, ToSmolStr, T};
|
||||
use syntax::{ast, AstNode, SyntaxNode, ToSmolStr};
|
||||
|
||||
use crate::{
|
||||
config::AutoImportExclusionType,
|
||||
|
@ -403,10 +403,11 @@ fn import_on_the_fly_method(
|
|||
|
||||
fn import_name(ctx: &CompletionContext<'_>) -> String {
|
||||
let token_kind = ctx.token.kind();
|
||||
if matches!(token_kind, T![.] | T![::]) {
|
||||
String::new()
|
||||
} else {
|
||||
|
||||
if token_kind.is_any_identifier() {
|
||||
ctx.token.to_string()
|
||||
} else {
|
||||
String::new()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -423,7 +423,7 @@ fn render_resolution_path(
|
|||
|
||||
let name = local_name.display_no_db(ctx.completion.edition).to_smolstr();
|
||||
let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
|
||||
if local_name.is_escaped(completion.edition) {
|
||||
if local_name.needs_escape(completion.edition) {
|
||||
item.insert_text(local_name.display_no_db(completion.edition).to_smolstr());
|
||||
}
|
||||
// Add `<>` for generic types
|
||||
|
|
|
@ -1746,7 +1746,7 @@ fn intrinsics() {
|
|||
fn function() {
|
||||
transmute$0
|
||||
}
|
||||
"#,
|
||||
"#,
|
||||
expect![[r#"
|
||||
fn transmute(…) (use core::mem::transmute) unsafe fn(Src) -> Dst
|
||||
"#]],
|
||||
|
@ -1767,7 +1767,9 @@ fn function() {
|
|||
mem::transmute$0
|
||||
}
|
||||
"#,
|
||||
expect![""],
|
||||
expect![[r#"
|
||||
fn transmute(…) (use core::mem) unsafe fn(Src) -> Dst
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,15 +1,17 @@
|
|||
//! Look up accessible paths for items.
|
||||
|
||||
use std::ops::ControlFlow;
|
||||
|
||||
use hir::{
|
||||
db::HirDatabase, AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig,
|
||||
ItemInNs, ModPath, Module, ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics,
|
||||
ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
|
||||
SemanticsScope, Trait, TyFingerprint, Type,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use syntax::{
|
||||
ast::{self, make, HasName},
|
||||
AstNode, SmolStr, SyntaxNode,
|
||||
AstNode, SyntaxNode,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
|
@ -51,7 +53,7 @@ pub struct TraitImportCandidate {
|
|||
#[derive(Debug)]
|
||||
pub struct PathImportCandidate {
|
||||
/// Optional qualifier before name.
|
||||
pub qualifier: Vec<SmolStr>,
|
||||
pub qualifier: Vec<Name>,
|
||||
/// The name the item (struct, trait, enum, etc.) should have.
|
||||
pub name: NameToImport,
|
||||
}
|
||||
|
@ -70,10 +72,18 @@ pub enum NameToImport {
|
|||
|
||||
impl NameToImport {
|
||||
pub fn exact_case_sensitive(s: String) -> NameToImport {
|
||||
let s = match s.strip_prefix("r#") {
|
||||
Some(s) => s.to_owned(),
|
||||
None => s,
|
||||
};
|
||||
NameToImport::Exact(s, true)
|
||||
}
|
||||
|
||||
pub fn fuzzy(s: String) -> NameToImport {
|
||||
let s = match s.strip_prefix("r#") {
|
||||
Some(s) => s.to_owned(),
|
||||
None => s,
|
||||
};
|
||||
// unless all chars are lowercase, we do a case sensitive search
|
||||
let case_sensitive = s.chars().any(|c| c.is_uppercase());
|
||||
NameToImport::Fuzzy(s, case_sensitive)
|
||||
|
@ -350,21 +360,27 @@ fn path_applicable_imports(
|
|||
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
|
||||
.collect()
|
||||
}
|
||||
// we have some unresolved qualifier that we search an import for
|
||||
// The key here is that whatever we import must form a resolved path for the remainder of
|
||||
// what follows
|
||||
// FIXME: This doesn't handle visibility
|
||||
[first_qsegment, qualifier_rest @ ..] => items_locator::items_with_name(
|
||||
sema,
|
||||
current_crate,
|
||||
NameToImport::Exact(first_qsegment.to_string(), true),
|
||||
NameToImport::Exact(first_qsegment.as_str().to_owned(), true),
|
||||
AssocSearchMode::Exclude,
|
||||
)
|
||||
.filter_map(|item| {
|
||||
import_for_item(
|
||||
// we found imports for `first_qsegment`, now we need to filter these imports by whether
|
||||
// they result in resolving the rest of the path successfully
|
||||
validate_resolvable(
|
||||
sema,
|
||||
scope,
|
||||
mod_path,
|
||||
scope_filter,
|
||||
&path_candidate.name,
|
||||
item,
|
||||
qualifier_rest,
|
||||
scope_filter,
|
||||
)
|
||||
})
|
||||
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
|
||||
|
@ -372,14 +388,16 @@ fn path_applicable_imports(
|
|||
}
|
||||
}
|
||||
|
||||
fn import_for_item(
|
||||
/// Validates and builds an import for `resolved_qualifier` if the `unresolved_qualifier` appended
|
||||
/// to it resolves and there is a validate `candidate` after that.
|
||||
fn validate_resolvable(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
scope: &SemanticsScope<'_>,
|
||||
mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
|
||||
scope_filter: impl Fn(ItemInNs) -> bool,
|
||||
candidate: &NameToImport,
|
||||
resolved_qualifier: ItemInNs,
|
||||
unresolved_qualifier: &[SmolStr],
|
||||
scope_filter: impl Fn(ItemInNs) -> bool,
|
||||
unresolved_qualifier: &[Name],
|
||||
) -> Option<LocatedImport> {
|
||||
let _p = tracing::info_span!("ImportAssets::import_for_item").entered();
|
||||
|
||||
|
@ -410,8 +428,11 @@ fn import_for_item(
|
|||
module,
|
||||
candidate.clone(),
|
||||
AssocSearchMode::Exclude,
|
||||
|it| match scope_filter(it) {
|
||||
true => ControlFlow::Break(it),
|
||||
false => ControlFlow::Continue(()),
|
||||
},
|
||||
)
|
||||
.find(|&it| scope_filter(it))
|
||||
.map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item))
|
||||
}
|
||||
// FIXME
|
||||
|
@ -709,7 +730,7 @@ fn path_import_candidate(
|
|||
if qualifier.first_qualifier().is_none_or(|it| sema.resolve_path(&it).is_none()) {
|
||||
let qualifier = qualifier
|
||||
.segments()
|
||||
.map(|seg| seg.name_ref().map(|name| SmolStr::new(name.text())))
|
||||
.map(|seg| seg.name_ref().map(|name| Name::new_root(&name.text())))
|
||||
.collect::<Option<Vec<_>>>()?;
|
||||
ImportCandidate::Path(PathImportCandidate { qualifier, name })
|
||||
} else {
|
||||
|
|
|
@ -2,6 +2,8 @@
|
|||
//! by its name and a few criteria.
|
||||
//! The main reason for this module to exist is the fact that project's items and dependencies' items
|
||||
//! are located in different caches, with different APIs.
|
||||
use std::ops::ControlFlow;
|
||||
|
||||
use either::Either;
|
||||
use hir::{import_map, Crate, ItemInNs, Module, Semantics};
|
||||
use limit::Limit;
|
||||
|
@ -17,6 +19,7 @@ pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(100);
|
|||
|
||||
pub use import_map::AssocSearchMode;
|
||||
|
||||
// FIXME: Do callbacks instead to avoid allocations.
|
||||
/// Searches for importable items with the given name in the crate and its dependencies.
|
||||
pub fn items_with_name<'a>(
|
||||
sema: &'a Semantics<'_, RootDatabase>,
|
||||
|
@ -70,12 +73,13 @@ pub fn items_with_name<'a>(
|
|||
}
|
||||
|
||||
/// Searches for importable items with the given name in the crate and its dependencies.
|
||||
pub fn items_with_name_in_module<'a>(
|
||||
sema: &'a Semantics<'_, RootDatabase>,
|
||||
pub fn items_with_name_in_module<T>(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
module: Module,
|
||||
name: NameToImport,
|
||||
assoc_item_search: AssocSearchMode,
|
||||
) -> impl Iterator<Item = ItemInNs> + 'a {
|
||||
mut cb: impl FnMut(ItemInNs) -> ControlFlow<T>,
|
||||
) -> Option<T> {
|
||||
let _p = tracing::info_span!("items_with_name_in", name = name.text(), assoc_item_search = ?assoc_item_search, ?module)
|
||||
.entered();
|
||||
|
||||
|
@ -107,14 +111,12 @@ pub fn items_with_name_in_module<'a>(
|
|||
local_query
|
||||
}
|
||||
};
|
||||
let mut local_results = Vec::new();
|
||||
local_query.search(&[sema.db.module_symbols(module)], |local_candidate| {
|
||||
local_results.push(match local_candidate.def {
|
||||
cb(match local_candidate.def {
|
||||
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
|
||||
def => ItemInNs::from(def),
|
||||
})
|
||||
});
|
||||
local_results.into_iter()
|
||||
})
|
||||
}
|
||||
|
||||
fn find_items<'a>(
|
||||
|
@ -142,7 +144,8 @@ fn find_items<'a>(
|
|||
local_results.push(match local_candidate.def {
|
||||
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
|
||||
def => ItemInNs::from(def),
|
||||
})
|
||||
});
|
||||
ControlFlow::<()>::Continue(())
|
||||
});
|
||||
local_results.into_iter().chain(external_importables)
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ use std::{
|
|||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
mem,
|
||||
ops::ControlFlow,
|
||||
};
|
||||
|
||||
use base_db::{
|
||||
|
@ -136,16 +137,13 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar
|
|||
// the module or crate indices for those in salsa unless we need to.
|
||||
.for_each(|module| symbol_collector.collect(module));
|
||||
|
||||
let mut symbols = symbol_collector.finish();
|
||||
symbols.shrink_to_fit();
|
||||
Arc::new(SymbolIndex::new(symbols))
|
||||
Arc::new(SymbolIndex::new(symbol_collector.finish()))
|
||||
}
|
||||
|
||||
fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
|
||||
let _p = tracing::info_span!("module_symbols").entered();
|
||||
|
||||
let symbols = SymbolCollector::collect_module(db.upcast(), module);
|
||||
Arc::new(SymbolIndex::new(symbols))
|
||||
Arc::new(SymbolIndex::new(SymbolCollector::collect_module(db.upcast(), module)))
|
||||
}
|
||||
|
||||
pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> {
|
||||
|
@ -222,13 +220,16 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
|
|||
};
|
||||
|
||||
let mut res = vec![];
|
||||
query.search(&indices, |f| res.push(f.clone()));
|
||||
query.search::<()>(&indices, |f| {
|
||||
res.push(f.clone());
|
||||
ControlFlow::Continue(())
|
||||
});
|
||||
res
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SymbolIndex {
|
||||
symbols: Vec<FileSymbol>,
|
||||
symbols: Box<[FileSymbol]>,
|
||||
map: fst::Map<Vec<u8>>,
|
||||
}
|
||||
|
||||
|
@ -253,10 +254,10 @@ impl Hash for SymbolIndex {
|
|||
}
|
||||
|
||||
impl SymbolIndex {
|
||||
fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
|
||||
fn new(mut symbols: Box<[FileSymbol]>) -> SymbolIndex {
|
||||
fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering {
|
||||
let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase());
|
||||
let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase());
|
||||
let lhs_chars = lhs.name.as_str().chars().map(|c| c.to_ascii_lowercase());
|
||||
let rhs_chars = rhs.name.as_str().chars().map(|c| c.to_ascii_lowercase());
|
||||
lhs_chars.cmp(rhs_chars)
|
||||
}
|
||||
|
||||
|
@@ -316,11 +317,11 @@ impl SymbolIndex {
}

impl Query {
    pub(crate) fn search<'sym>(
    pub(crate) fn search<'sym, T>(
        self,
        indices: &'sym [Arc<SymbolIndex>],
        cb: impl FnMut(&'sym FileSymbol),
    ) {
        cb: impl FnMut(&'sym FileSymbol) -> ControlFlow<T>,
    ) -> Option<T> {
        let _p = tracing::info_span!("symbol_index::Query::search").entered();
        let mut op = fst::map::OpBuilder::new();
        match self.mode {

@@ -351,12 +352,12 @@ impl Query {
        }
    }

    fn search_maps<'sym>(
    fn search_maps<'sym, T>(
        &self,
        indices: &'sym [Arc<SymbolIndex>],
        mut stream: fst::map::Union<'_>,
        mut cb: impl FnMut(&'sym FileSymbol),
    ) {
        mut cb: impl FnMut(&'sym FileSymbol) -> ControlFlow<T>,
    ) -> Option<T> {
        let ignore_underscore_prefixed = !self.query.starts_with("__");
        while let Some((_, indexed_values)) = stream.next() {
            for &IndexedValue { index, value } in indexed_values {

@@ -377,15 +378,19 @@ impl Query {
                        continue;
                    }
                    // Hide symbols that start with `__` unless the query starts with `__`
                    if ignore_underscore_prefixed && symbol.name.starts_with("__") {
                    let symbol_name = symbol.name.as_str();
                    if ignore_underscore_prefixed && symbol_name.starts_with("__") {
                        continue;
                    }
                    if self.mode.check(&self.query, self.case_sensitive, &symbol.name) {
                        cb(symbol);
                    if self.mode.check(&self.query, self.case_sensitive, symbol_name) {
                        if let Some(b) = cb(symbol).break_value() {
                            return Some(b);
                        }
                    }
                }
            }
        }
        None
    }

    fn matches_assoc_mode(&self, is_trait_assoc_item: bool) -> bool {

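The `search`/`search_maps` rework above threads a `ControlFlow` callback through the index walk so callers can bail out early and get a value back as `Option<T>`. A self-contained sketch of that callback shape; the `Symbol` struct here is a stand-in, not rust-analyzer's `FileSymbol`:

use std::ops::ControlFlow;

// Stand-in for `FileSymbol`; only the callback shape matters here.
struct Symbol {
    name: &'static str,
}

// The driver forwards `ControlFlow::Break` out of its loop and surfaces it as `Option<T>`,
// mirroring the `cb(symbol).break_value()` / `return Some(b)` pattern in the hunk above.
fn search<T>(symbols: &[Symbol], mut cb: impl FnMut(&Symbol) -> ControlFlow<T>) -> Option<T> {
    for symbol in symbols {
        if let ControlFlow::Break(b) = cb(symbol) {
            return Some(b);
        }
    }
    None
}

fn main() {
    let symbols = [Symbol { name: "foo" }, Symbol { name: "bar" }, Symbol { name: "baz" }];

    // Exhaustive walk, like the old `FnMut(&FileSymbol)` callers: never break.
    let mut all = Vec::new();
    let done = search::<()>(&symbols, |s| {
        all.push(s.name);
        ControlFlow::Continue(())
    });
    assert!(done.is_none());

    // Early exit: stop at the first symbol that matches.
    let first_b = search(&symbols, |s| {
        if s.name.starts_with('b') { ControlFlow::Break(s.name) } else { ControlFlow::Continue(()) }
    });
    assert_eq!(first_b, Some("bar"));
}
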
@ -476,9 +481,9 @@ use Macro as ItemLikeMacro;
|
|||
use Macro as Trait; // overlay namespaces
|
||||
//- /b_mod.rs
|
||||
struct StructInModB;
|
||||
use super::Macro as SuperItemLikeMacro;
|
||||
use crate::b_mod::StructInModB as ThisStruct;
|
||||
use crate::Trait as IsThisJustATrait;
|
||||
pub(self) use super::Macro as SuperItemLikeMacro;
|
||||
pub(self) use crate::b_mod::StructInModB as ThisStruct;
|
||||
pub(self) use crate::Trait as IsThisJustATrait;
|
||||
"#,
|
||||
);
|
||||
|
||||
|
@ -487,7 +492,7 @@ use crate::Trait as IsThisJustATrait;
|
|||
.into_iter()
|
||||
.map(|module_id| {
|
||||
let mut symbols = SymbolCollector::collect_module(&db, module_id);
|
||||
symbols.sort_by_key(|it| it.name.clone());
|
||||
symbols.sort_by_key(|it| it.name.as_str().to_owned());
|
||||
(module_id, symbols)
|
||||
})
|
||||
.collect();
|
||||
|
@ -514,7 +519,7 @@ struct Duplicate;
|
|||
.into_iter()
|
||||
.map(|module_id| {
|
||||
let mut symbols = SymbolCollector::collect_module(&db, module_id);
|
||||
symbols.sort_by_key(|it| it.name.clone());
|
||||
symbols.sort_by_key(|it| it.name.as_str().to_owned());
|
||||
(module_id, symbols)
|
||||
})
|
||||
.collect();
|
||||
|
|
|
@ -631,7 +631,7 @@
|
|||
def: Function(
|
||||
Function {
|
||||
id: FunctionId(
|
||||
3,
|
||||
2,
|
||||
),
|
||||
},
|
||||
),
|
||||
|
@ -664,7 +664,7 @@
|
|||
def: Function(
|
||||
Function {
|
||||
id: FunctionId(
|
||||
2,
|
||||
1,
|
||||
),
|
||||
},
|
||||
),
|
||||
|
@ -794,7 +794,7 @@
|
|||
def: Function(
|
||||
Function {
|
||||
id: FunctionId(
|
||||
1,
|
||||
3,
|
||||
),
|
||||
},
|
||||
),
|
||||
|
@ -877,6 +877,37 @@
|
|||
},
|
||||
},
|
||||
[
|
||||
FileSymbol {
|
||||
name: "IsThisJustATrait",
|
||||
def: Trait(
|
||||
Trait {
|
||||
id: TraitId(
|
||||
0,
|
||||
),
|
||||
},
|
||||
),
|
||||
loc: DeclarationLocation {
|
||||
hir_file_id: EditionedFileId(
|
||||
FileId(
|
||||
1,
|
||||
),
|
||||
Edition2021,
|
||||
),
|
||||
ptr: SyntaxNodePtr {
|
||||
kind: USE_TREE,
|
||||
range: 141..173,
|
||||
},
|
||||
name_ptr: AstPtr(
|
||||
SyntaxNodePtr {
|
||||
kind: NAME,
|
||||
range: 157..173,
|
||||
},
|
||||
),
|
||||
},
|
||||
container_name: None,
|
||||
is_alias: false,
|
||||
is_assoc: false,
|
||||
},
|
||||
FileSymbol {
|
||||
name: "IsThisJustATrait",
|
||||
def: Macro(
|
||||
|
@ -897,12 +928,12 @@
|
|||
),
|
||||
ptr: SyntaxNodePtr {
|
||||
kind: USE_TREE,
|
||||
range: 111..143,
|
||||
range: 141..173,
|
||||
},
|
||||
name_ptr: AstPtr(
|
||||
SyntaxNodePtr {
|
||||
kind: NAME,
|
||||
range: 127..143,
|
||||
range: 157..173,
|
||||
},
|
||||
),
|
||||
},
|
||||
|
@ -963,78 +994,12 @@
|
|||
),
|
||||
ptr: SyntaxNodePtr {
|
||||
kind: USE_TREE,
|
||||
range: 25..59,
|
||||
range: 35..69,
|
||||
},
|
||||
name_ptr: AstPtr(
|
||||
SyntaxNodePtr {
|
||||
kind: NAME,
|
||||
range: 41..59,
|
||||
},
|
||||
),
|
||||
},
|
||||
container_name: None,
|
||||
is_alias: false,
|
||||
is_assoc: false,
|
||||
},
|
||||
FileSymbol {
|
||||
name: "ThisStruct",
|
||||
def: Adt(
|
||||
Struct(
|
||||
Struct {
|
||||
id: StructId(
|
||||
4,
|
||||
),
|
||||
},
|
||||
),
|
||||
),
|
||||
loc: DeclarationLocation {
|
||||
hir_file_id: EditionedFileId(
|
||||
FileId(
|
||||
1,
|
||||
),
|
||||
Edition2021,
|
||||
),
|
||||
ptr: SyntaxNodePtr {
|
||||
kind: USE_TREE,
|
||||
range: 65..105,
|
||||
},
|
||||
name_ptr: AstPtr(
|
||||
SyntaxNodePtr {
|
||||
kind: NAME,
|
||||
range: 95..105,
|
||||
},
|
||||
),
|
||||
},
|
||||
container_name: None,
|
||||
is_alias: false,
|
||||
is_assoc: false,
|
||||
},
|
||||
FileSymbol {
|
||||
name: "ThisStruct",
|
||||
def: Adt(
|
||||
Struct(
|
||||
Struct {
|
||||
id: StructId(
|
||||
4,
|
||||
),
|
||||
},
|
||||
),
|
||||
),
|
||||
loc: DeclarationLocation {
|
||||
hir_file_id: EditionedFileId(
|
||||
FileId(
|
||||
1,
|
||||
),
|
||||
Edition2021,
|
||||
),
|
||||
ptr: SyntaxNodePtr {
|
||||
kind: USE_TREE,
|
||||
range: 65..105,
|
||||
},
|
||||
name_ptr: AstPtr(
|
||||
SyntaxNodePtr {
|
||||
kind: NAME,
|
||||
range: 95..105,
|
||||
range: 51..69,
|
||||
},
|
||||
),
|
||||
},
|
||||
|
|
|
@@ -44,13 +44,16 @@ pub struct NavigationTarget {
    ///
    /// This range must be contained within [`Self::full_range`].
    pub focus_range: Option<TextRange>,
    // FIXME: Symbol
    pub name: SmolStr,
    pub kind: Option<SymbolKind>,
    // FIXME: Symbol
    pub container_name: Option<SmolStr>,
    pub description: Option<String>,
    pub docs: Option<Documentation>,
    /// In addition to a `name` field, a `NavigationTarget` may also be aliased
    /// In such cases we want a `NavigationTarget` to be accessible by its alias
    // FIXME: Symbol
    pub alias: Option<SmolStr>,
}

@@ -191,10 +194,10 @@ impl TryToNav for FileSymbol {
        NavigationTarget {
            file_id,
            name: self.is_alias.then(|| self.def.name(db)).flatten().map_or_else(
                || self.name.clone(),
                || self.name.as_str().into(),
                |it| it.display_no_db(edition).to_smolstr(),
            ),
            alias: self.is_alias.then(|| self.name.clone()),
            alias: self.is_alias.then(|| self.name.as_str().into()),
            kind: Some(self.def.into()),
            full_range,
            focus_range,