Improve doc alias discovery
commit cf41b1d3a5
parent 7590c393de
2 changed files with 61 additions and 39 deletions
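Note on the feature being improved (not part of the diff): the aliases discovered here come from the `#[doc(alias = "...")]` attribute, which registers extra names under which rustdoc's search will find an item; at the time of this commit the attribute was still nightly-only behind the `doc_alias` feature. A minimal illustration, with made-up item and alias names:

    /// Rounds `n` up to the next power of two (0 on overflow).
    ///
    /// With the aliases below, searching the generated docs for "ceil_pow2"
    /// or "round_up_pow2" also surfaces this function.
    #[doc(alias = "ceil_pow2")]
    #[doc(alias = "round_up_pow2")]
    pub fn next_power_of_two_or_zero(n: u64) -> u64 {
        n.checked_next_power_of_two().unwrap_or(0)
    }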
@@ -278,7 +278,7 @@ pub struct RenderInfo {
 /// Struct representing one entry in the JS search index. These are all emitted
 /// by hand to a large JS file at the end of cache-creation.
 #[derive(Debug)]
-struct IndexItem {
+pub struct IndexItem {
     ty: ItemType,
     name: String,
     path: String,
@@ -293,7 +293,12 @@ impl Serialize for IndexItem {
     where
         S: Serializer,
     {
-        assert_eq!(self.parent.is_some(), self.parent_idx.is_some());
+        assert_eq!(
+            self.parent.is_some(),
+            self.parent_idx.is_some(),
+            "`{}` is missing idx",
+            self.name
+        );

         (self.ty, &self.name, &self.path, &self.desc, self.parent_idx, &self.search_type)
             .serialize(serializer)
@@ -836,7 +841,7 @@ themePicker.onblur = handleThemeButtonsBlur;
     {
         let (mut all_aliases, _) = try_err!(collect(&dst, &krate.name, "ALIASES"), &dst);
         let mut output = String::with_capacity(100);
-        for (alias, items) in &cx.cache.aliases {
+        for (alias, items) in cx.cache.get_aliases() {
             if items.is_empty() {
                 continue;
             }
@@ -120,7 +120,7 @@ crate struct Cache {

     /// Aliases added through `#[doc(alias = "...")]`. Since a few items can have the same alias,
     /// we need the alias element to have an array of items.
-    pub(super) aliases: FxHashMap<String, Vec<IndexItem>>,
+    pub(super) aliases: FxHashMap<String, Vec<usize>>,
 }

 impl Cache {
@@ -311,7 +311,7 @@ impl DocFolder for Cache {
             };

             match parent {
-                (parent, Some(path)) if is_inherent_impl_item || (!self.stripped_mod) => {
+                (parent, Some(path)) if is_inherent_impl_item || !self.stripped_mod => {
                     debug_assert!(!item.is_stripped());

                     // A crate has a module at its root, containing all items,
@@ -327,6 +327,21 @@ impl DocFolder for Cache {
                             parent_idx: None,
                             search_type: get_index_search_type(&item),
                         });
+
+                        for alias in item
+                            .attrs
+                            .lists(sym::doc)
+                            .filter(|a| a.check_name(sym::alias))
+                            .filter_map(|a| a.value_str().map(|s| s.to_string().replace("\"", "")))
+                            .filter(|v| !v.is_empty())
+                            .collect::<FxHashSet<_>>()
+                            .into_iter()
+                        {
+                            self.aliases
+                                .entry(alias.to_lowercase())
+                                .or_insert(Vec::with_capacity(1))
+                                .push(self.search_index.len() - 1);
+                        }
                     }
                 }
                 (Some(parent), None) if is_inherent_impl_item => {
@@ -363,6 +378,9 @@ impl DocFolder for Cache {
             | clean::MacroItem(..)
             | clean::ProcMacroItem(..)
             | clean::VariantItem(..)
+            | clean::StructFieldItem(..)
+            | clean::TyMethodItem(..)
+            | clean::MethodItem(..)
                 if !self.stripped_mod =>
             {
                 // Re-exported items mean that the same id can show up twice
@@ -376,11 +394,8 @@ impl DocFolder for Cache {
                 {
                     self.paths.insert(item.def_id, (self.stack.clone(), item.type_()));
                 }
-                self.add_aliases(&item);
             }
-
             clean::PrimitiveItem(..) => {
-                self.add_aliases(&item);
                 self.paths.insert(item.def_id, (self.stack.clone(), item.type_()));
             }

@@ -489,36 +504,23 @@ impl DocFolder for Cache {
 }

 impl Cache {
-    fn add_aliases(&mut self, item: &clean::Item) {
-        if item.def_id.index == CRATE_DEF_INDEX {
-            return;
-        }
-        if let Some(ref item_name) = item.name {
-            let path = self
-                .paths
-                .get(&item.def_id)
-                .map(|p| p.0[..p.0.len() - 1].join("::"))
-                .unwrap_or("std".to_owned());
-            for alias in item
-                .attrs
-                .lists(sym::doc)
-                .filter(|a| a.check_name(sym::alias))
-                .filter_map(|a| a.value_str().map(|s| s.to_string().replace("\"", "")))
-                .filter(|v| !v.is_empty())
-                .collect::<FxHashSet<_>>()
-                .into_iter()
-            {
-                self.aliases.entry(alias).or_insert(Vec::with_capacity(1)).push(IndexItem {
-                    ty: item.type_(),
-                    name: item_name.to_string(),
-                    path: path.clone(),
-                    desc: shorten(plain_summary_line(item.doc_value())),
-                    parent: None,
-                    parent_idx: None,
-                    search_type: get_index_search_type(&item),
-                });
-            }
-        }
+    pub fn get_aliases<'a>(&'a self) -> FxHashMap<String, Vec<&'a IndexItem>> {
+        self.aliases
+            .iter()
+            .map(|(k, values)| {
+                (
+                    k.clone(),
+                    values
+                        .iter()
+                        .filter(|v| {
+                            let x = &self.search_index[**v];
+                            x.parent_idx.is_some() == x.parent.is_some()
+                        })
+                        .map(|v| &self.search_index[*v])
+                        .collect::<Vec<_>>(),
+                )
+            })
+            .collect()
     }
 }

@@ -567,7 +569,8 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
     let mut crate_items = Vec::with_capacity(cache.search_index.len());
     let mut crate_paths = vec![];

-    let Cache { ref mut search_index, ref orphan_impl_items, ref paths, .. } = *cache;
+    let Cache { ref mut search_index, ref orphan_impl_items, ref paths, ref mut aliases, .. } =
+        *cache;

     // Attach all orphan items to the type's definition if the type
     // has since been learned.
@@ -582,6 +585,20 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
                 parent_idx: None,
                 search_type: get_index_search_type(&item),
             });
+            for alias in item
+                .attrs
+                .lists(sym::doc)
+                .filter(|a| a.check_name(sym::alias))
+                .filter_map(|a| a.value_str().map(|s| s.to_string().replace("\"", "")))
+                .filter(|v| !v.is_empty())
+                .collect::<FxHashSet<_>>()
+                .into_iter()
+            {
+                aliases
+                    .entry(alias.to_lowercase())
+                    .or_insert(Vec::with_capacity(1))
+                    .push(search_index.len() - 1);
+            }
         }
     }

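To see the shape of the change outside the rustdoc sources, here is a small self-contained sketch of the scheme the diff switches to, using my own simplified names and std's `HashMap` in place of `FxHashMap`: `aliases` maps a lowercased alias to indices into `search_index`, indices are recorded right after an item is pushed, and a `get_aliases`-style accessor resolves them back to entries when the ALIASES output is written.

    use std::collections::HashMap;

    // Simplified stand-in for rustdoc's IndexItem; not the real type.
    #[derive(Debug)]
    struct IndexItem {
        name: String,
        desc: String,
    }

    #[derive(Default)]
    struct Cache {
        // One entry per indexed item, in insertion order.
        search_index: Vec<IndexItem>,
        // Lowercased alias -> positions in `search_index`, mirroring the
        // new `FxHashMap<String, Vec<usize>>` field in the diff.
        aliases: HashMap<String, Vec<usize>>,
    }

    impl Cache {
        // Hypothetical helper: push an item and record its aliases as indices,
        // the way the diff does right after `search_index.push(...)`.
        fn index_item(&mut self, item: IndexItem, doc_aliases: &[&str]) {
            self.search_index.push(item);
            let idx = self.search_index.len() - 1;
            for alias in doc_aliases.iter().filter(|a| !a.is_empty()) {
                self.aliases.entry(alias.to_lowercase()).or_insert_with(Vec::new).push(idx);
            }
        }

        // Resolve stored indices back into references, in the spirit of the
        // new `Cache::get_aliases` consumed by the ALIASES writer.
        fn get_aliases(&self) -> HashMap<String, Vec<&IndexItem>> {
            self.aliases
                .iter()
                .map(|(alias, idxs)| {
                    (alias.clone(), idxs.iter().map(|&i| &self.search_index[i]).collect())
                })
                .collect()
        }
    }

    fn main() {
        let mut cache = Cache::default();
        cache.index_item(
            IndexItem { name: "next_power_of_two_or_zero".into(), desc: "rounds up".into() },
            &["ceil_pow2", "round_up_pow2"],
        );
        for (alias, items) in cache.get_aliases() {
            println!("{} -> {:?}", alias, items);
        }
    }

Compared with the removed `add_aliases`, which built separate `IndexItem`s with a path guessed from `self.paths` (falling back to "std"), storing indices keeps alias lookups pointing at the same entries that end up in the search index instead of duplicating them.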