Use more let chain
parent 9bb6e60d1f
commit 86fd5a1b44
16 changed files with 223 additions and 254 deletions
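
The change applied in every hunk below has the same shape: nested `if let` / `if` blocks are collapsed into a single `if` using a let chain, where `let` bindings and boolean conditions are joined with `&&`. A minimal sketch of the pattern follows; the function and data are invented for illustration and are not part of this commit, and note that let chains were still an unstable feature at the time, which compiler-internal crates can opt into.

// Before: each check adds a level of nesting.
fn first_positive_doubled(values: &[i32]) -> Option<i32> {
    if let Some(first) = values.first() {
        if *first > 0 {
            return Some(first * 2);
        }
    }
    None
}

// After: one `if` with a let chain; the body runs only when the pattern
// matches *and* the extra condition holds.
fn first_positive_doubled_chained(values: &[i32]) -> Option<i32> {
    if let Some(first) = values.first() && *first > 0 {
        return Some(first * 2);
    }
    None
}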
@@ -164,10 +164,10 @@ impl Cfg {
     /// Renders the configuration for human display, as a short HTML description.
     pub(crate) fn render_short_html(&self) -> String {
         let mut msg = Display(self, Format::ShortHtml).to_string();
-        if self.should_capitalize_first_letter() {
-            if let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric()) {
-                msg[i..i + 1].make_ascii_uppercase();
-            }
-        }
+        if self.should_capitalize_first_letter() &&
+            let Some(i) = msg.find(|c: char| c.is_ascii_alphanumeric())
+        {
+            msg[i..i + 1].make_ascii_uppercase();
+        }
         msg
     }
@@ -390,18 +390,17 @@ pub(crate) fn build_impl(

     // Only inline impl if the implemented trait is
     // reachable in rustdoc generated documentation
-    if !did.is_local() {
-        if let Some(traitref) = associated_trait {
-            let did = traitref.def_id;
-            if !cx.cache.effective_visibilities.is_directly_public(tcx, did) {
-                return;
-            }
+    if !did.is_local() && let Some(traitref) = associated_trait {
+        let did = traitref.def_id;
+        if !cx.cache.effective_visibilities.is_directly_public(tcx, did) {
+            return;
+        }

-            if let Some(stab) = tcx.lookup_stability(did) {
-                if stab.is_unstable() && stab.feature == sym::rustc_private {
-                    return;
-                }
-            }
-        }
+        if let Some(stab) = tcx.lookup_stability(did) &&
+            stab.is_unstable() &&
+            stab.feature == sym::rustc_private
+        {
+            return;
+        }
     }

@@ -525,10 +524,8 @@ pub(crate) fn build_impl(
     }

     while let Some(ty) = stack.pop() {
-        if let Some(did) = ty.def_id(&cx.cache) {
-            if tcx.is_doc_hidden(did) {
-                return;
-            }
+        if let Some(did) = ty.def_id(&cx.cache) && tcx.is_doc_hidden(did) {
+            return;
         }
         if let Some(generics) = ty.generics() {
             stack.extend(generics);
@@ -787,43 +787,43 @@ fn clean_ty_generics<'tcx>(
                 None
             })();

-            if let Some(param_idx) = param_idx {
-                if let Some(b) = impl_trait.get_mut(&param_idx.into()) {
-                    let p: WherePredicate = clean_predicate(*p, cx)?;
-
-                    b.extend(
-                        p.get_bounds()
-                            .into_iter()
-                            .flatten()
-                            .cloned()
-                            .filter(|b| !b.is_sized_bound(cx)),
-                    );
-
-                    let proj = projection.map(|p| {
-                        (
-                            clean_projection(p.map_bound(|p| p.projection_ty), cx, None),
-                            p.map_bound(|p| p.term),
-                        )
-                    });
-                    if let Some(((_, trait_did, name), rhs)) = proj
-                        .as_ref()
-                        .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs)))
-                    {
-                        // FIXME(...): Remove this unwrap()
-                        impl_trait_proj.entry(param_idx).or_default().push((
-                            trait_did,
-                            name,
-                            rhs.map_bound(|rhs| rhs.ty().unwrap()),
-                            p.get_bound_params()
-                                .into_iter()
-                                .flatten()
-                                .map(|param| GenericParamDef::lifetime(param.0))
-                                .collect(),
-                        ));
-                    }
-
-                    return None;
-                }
+            if let Some(param_idx) = param_idx
+                && let Some(b) = impl_trait.get_mut(&param_idx.into())
+            {
+                let p: WherePredicate = clean_predicate(*p, cx)?;
+
+                b.extend(
+                    p.get_bounds()
+                        .into_iter()
+                        .flatten()
+                        .cloned()
+                        .filter(|b| !b.is_sized_bound(cx)),
+                );
+
+                let proj = projection.map(|p| {
+                    (
+                        clean_projection(p.map_bound(|p| p.projection_ty), cx, None),
+                        p.map_bound(|p| p.term),
+                    )
+                });
+                if let Some(((_, trait_did, name), rhs)) = proj
+                    .as_ref()
+                    .and_then(|(lhs, rhs): &(Type, _)| Some((lhs.projection()?, rhs)))
+                {
+                    // FIXME(...): Remove this unwrap()
+                    impl_trait_proj.entry(param_idx).or_default().push((
+                        trait_did,
+                        name,
+                        rhs.map_bound(|rhs| rhs.ty().unwrap()),
+                        p.get_bound_params()
+                            .into_iter()
+                            .flatten()
+                            .map(|param| GenericParamDef::lifetime(param.0))
+                            .collect(),
+                    ));
+                }
+
+                return None;
             }

             Some(p)
@@ -886,7 +886,7 @@ fn clean_ty_generics<'tcx>(
     // `?Sized` bound for each one we didn't find to be `Sized`.
     for tp in &stripped_params {
         if let types::GenericParamDefKind::Type { .. } = tp.kind
             && !sized_params.contains(&tp.name)
         {
             where_predicates.push(WherePredicate::BoundPredicate {
                 ty: Type::Generic(tp.name),
@@ -1461,10 +1461,10 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
             // Try to normalize `<X as Y>::T` to a type
             let ty = hir_ty_to_ty(cx.tcx, hir_ty);
             // `hir_to_ty` can return projection types with escaping vars for GATs, e.g. `<() as Trait>::Gat<'_>`
-            if !ty.has_escaping_bound_vars() {
-                if let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty)) {
-                    return clean_middle_ty(normalized_value, cx, None);
-                }
+            if !ty.has_escaping_bound_vars()
+                && let Some(normalized_value) = normalize(cx, ty::Binder::dummy(ty))
+            {
+                return clean_middle_ty(normalized_value, cx, None);
             }

             let trait_segments = &p.segments[..p.segments.len() - 1];
@@ -1878,11 +1878,9 @@ fn clean_middle_opaque_bounds<'tcx>(
                 _ => return None,
             };

-            if let Some(sized) = cx.tcx.lang_items().sized_trait() {
-                if trait_ref.def_id() == sized {
-                    has_sized = true;
-                    return None;
-                }
+            if let Some(sized) = cx.tcx.lang_items().sized_trait() && trait_ref.def_id() == sized {
+                has_sized = true;
+                return None;
             }

             let bindings: ThinVec<_> = bounds
@@ -2392,17 +2390,15 @@ fn clean_use_statement_inner<'tcx>(
     let is_visible_from_parent_mod =
        visibility.is_accessible_from(parent_mod, cx.tcx) && !current_mod.is_top_level_module();

-    if pub_underscore {
-        if let Some(ref inline) = inline_attr {
-            rustc_errors::struct_span_err!(
-                cx.tcx.sess,
-                inline.span(),
-                E0780,
-                "anonymous imports cannot be inlined"
-            )
-            .span_label(import.span, "anonymous import")
-            .emit();
-        }
+    if pub_underscore && let Some(ref inline) = inline_attr {
+        rustc_errors::struct_span_err!(
+            cx.tcx.sess,
+            inline.span(),
+            E0780,
+            "anonymous imports cannot be inlined"
+        )
+        .span_label(import.span, "anonymous import")
+        .emit();
     }

     // We consider inlining the documentation of `pub use` statements, but we
@@ -2438,14 +2434,13 @@ fn clean_use_statement_inner<'tcx>(
             }
             Import::new_glob(resolve_use_source(cx, path), true)
         } else {
-            if inline_attr.is_none() {
-                if let Res::Def(DefKind::Mod, did) = path.res {
-                    if !did.is_local() && did.is_crate_root() {
-                        // if we're `pub use`ing an extern crate root, don't inline it unless we
-                        // were specifically asked for it
-                        denied = true;
-                    }
-                }
+            if inline_attr.is_none()
+                && let Res::Def(DefKind::Mod, did) = path.res
+                && !did.is_local() && did.is_crate_root()
+            {
+                // if we're `pub use`ing an extern crate root, don't inline it unless we
+                // were specifically asked for it
+                denied = true;
             }
             if !denied {
                 let mut visited = DefIdSet::default();
@@ -182,10 +182,8 @@ impl ExternalCrate {
             return Local;
         }

-        if extern_url_takes_precedence {
-            if let Some(url) = extern_url {
-                return to_remote(url);
-            }
+        if extern_url_takes_precedence && let Some(url) = extern_url {
+            return to_remote(url);
         }

         // Failing that, see if there's an attribute specifying where to find this
@@ -1172,10 +1170,10 @@ impl GenericBound {

     pub(crate) fn is_sized_bound(&self, cx: &DocContext<'_>) -> bool {
         use rustc_hir::TraitBoundModifier as TBM;
-        if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self {
-            if Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait() {
-                return true;
-            }
+        if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, TBM::None) = *self &&
+            Some(trait_.def_id()) == cx.tcx.lang_items().sized_trait()
+        {
+            return true;
         }
         false
     }
@@ -345,10 +345,10 @@ pub(crate) fn is_literal_expr(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
         return true;
     }

-    if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind {
-        if let hir::ExprKind::Lit(_) = &expr.kind {
-            return true;
-        }
+    if let hir::ExprKind::Unary(hir::UnOp::Neg, expr) = &expr.kind &&
+        let hir::ExprKind::Lit(_) = &expr.kind
+    {
+        return true;
     }
 }

@@ -229,11 +229,11 @@ fn scrape_test_config(attrs: &[ast::Attribute]) -> GlobalTestOptions {
             if attr.has_name(sym::no_crate_inject) {
                 opts.no_crate_inject = true;
             }
-            if attr.has_name(sym::attr) {
-                if let Some(l) = attr.meta_item_list() {
-                    for item in l {
-                        opts.attrs.push(pprust::meta_list_item_to_string(item));
-                    }
-                }
+            if attr.has_name(sym::attr)
+                && let Some(l) = attr.meta_item_list()
+            {
+                for item in l {
+                    opts.attrs.push(pprust::meta_list_item_to_string(item));
+                }
             }
         }
@@ -594,31 +594,28 @@ pub(crate) fn make_test(
         loop {
             match parser.parse_item(ForceCollect::No) {
                 Ok(Some(item)) => {
-                    if !found_main {
-                        if let ast::ItemKind::Fn(..) = item.kind {
-                            if item.ident.name == sym::main {
-                                found_main = true;
-                            }
-                        }
-                    }
+                    if !found_main &&
+                        let ast::ItemKind::Fn(..) = item.kind &&
+                        item.ident.name == sym::main
+                    {
+                        found_main = true;
+                    }

-                    if !found_extern_crate {
-                        if let ast::ItemKind::ExternCrate(original) = item.kind {
-                            // This code will never be reached if `crate_name` is none because
-                            // `found_extern_crate` is initialized to `true` if it is none.
-                            let crate_name = crate_name.unwrap();
-
-                            match original {
-                                Some(name) => found_extern_crate = name.as_str() == crate_name,
-                                None => found_extern_crate = item.ident.as_str() == crate_name,
-                            }
-                        }
-                    }
+                    if !found_extern_crate &&
+                        let ast::ItemKind::ExternCrate(original) = item.kind
+                    {
+                        // This code will never be reached if `crate_name` is none because
+                        // `found_extern_crate` is initialized to `true` if it is none.
+                        let crate_name = crate_name.unwrap();
+
+                        match original {
+                            Some(name) => found_extern_crate = name.as_str() == crate_name,
+                            None => found_extern_crate = item.ident.as_str() == crate_name,
+                        }
+                    }

-                    if !found_macro {
-                        if let ast::ItemKind::MacCall(..) = item.kind {
-                            found_macro = true;
-                        }
-                    }
+                    if !found_macro && let ast::ItemKind::MacCall(..) = item.kind {
+                        found_macro = true;
+                    }

                     if found_main && found_extern_crate {
@@ -972,14 +969,12 @@ impl Collector {
     fn get_filename(&self) -> FileName {
         if let Some(ref source_map) = self.source_map {
             let filename = source_map.span_to_filename(self.position);
-            if let FileName::Real(ref filename) = filename {
-                if let Ok(cur_dir) = env::current_dir() {
-                    if let Some(local_path) = filename.local_path() {
-                        if let Ok(path) = local_path.strip_prefix(&cur_dir) {
-                            return path.to_owned().into();
-                        }
-                    }
-                }
+            if let FileName::Real(ref filename) = filename &&
+                let Ok(cur_dir) = env::current_dir() &&
+                let Some(local_path) = filename.local_path() &&
+                let Ok(path) = local_path.strip_prefix(&cur_dir)
+            {
+                return path.to_owned().into();
             }
             filename
         } else if let Some(ref filename) = self.filename {
@@ -229,16 +229,15 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
         }

         // Collect all the implementors of traits.
-        if let clean::ImplItem(ref i) = *item.kind {
-            if let Some(trait_) = &i.trait_ {
-                if !i.kind.is_blanket() {
-                    self.cache
-                        .implementors
-                        .entry(trait_.def_id())
-                        .or_default()
-                        .push(Impl { impl_item: item.clone() });
-                }
-            }
-        }
+        if let clean::ImplItem(ref i) = *item.kind &&
+            let Some(trait_) = &i.trait_ &&
+            !i.kind.is_blanket()
+        {
+            self.cache
+                .implementors
+                .entry(trait_.def_id())
+                .or_default()
+                .push(Impl { impl_item: item.clone() });
+        }

         // Index this method for searching later on.
@@ -709,11 +709,9 @@ pub(crate) fn href_with_root_path(
             }
         }
     };
-    if !is_remote {
-        if let Some(root_path) = root_path {
-            let root = root_path.trim_end_matches('/');
-            url_parts.push_front(root);
-        }
+    if !is_remote && let Some(root_path) = root_path {
+        let root = root_path.trim_end_matches('/');
+        url_parts.push_front(root);
     }
     debug!(?url_parts);
     match shortty {
@@ -466,10 +466,8 @@ impl<'a> PeekIter<'a> {
     }
     /// Returns the next item after the current one. It doesn't interfere with `peek_next` output.
     fn peek(&mut self) -> Option<&(TokenKind, &'a str)> {
-        if self.stored.is_empty() {
-            if let Some(next) = self.iter.next() {
-                self.stored.push_back(next);
-            }
+        if self.stored.is_empty() && let Some(next) = self.iter.next() {
+            self.stored.push_back(next);
         }
         self.stored.front()
     }
@@ -705,14 +705,12 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
             shared.fs.write(scrape_examples_help_file, v)?;
         }

-        if let Some(ref redirections) = shared.redirections {
-            if !redirections.borrow().is_empty() {
-                let redirect_map_path =
-                    self.dst.join(crate_name.as_str()).join("redirect-map.json");
-                let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
-                shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
-                shared.fs.write(redirect_map_path, paths)?;
-            }
+        if let Some(ref redirections) = shared.redirections && !redirections.borrow().is_empty() {
+            let redirect_map_path =
+                self.dst.join(crate_name.as_str()).join("redirect-map.json");
+            let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
+            shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
+            shared.fs.write(redirect_map_path, paths)?;
         }

         // No need for it anymore.
@@ -2225,14 +2225,13 @@ fn sidebar_deref_methods(
         })
     {
         debug!("found target, real_target: {:?} {:?}", target, real_target);
-        if let Some(did) = target.def_id(c) {
-            if let Some(type_did) = impl_.inner_impl().for_.def_id(c) {
+        if let Some(did) = target.def_id(c) &&
+            let Some(type_did) = impl_.inner_impl().for_.def_id(c) &&
             // `impl Deref<Target = S> for S`
-                if did == type_did || !derefs.insert(did) {
-                    // Avoid infinite cycles
-                    return;
-                }
-            }
-        }
+            (did == type_did || !derefs.insert(did))
+        {
+            // Avoid infinite cycles
+            return;
+        }
         let deref_mut = v.iter().any(|i| i.trait_did() == cx.tcx().lang_items().deref_mut_trait());
         let inner_impl = target
@@ -2266,25 +2265,24 @@ fn sidebar_deref_methods(
             }

             // Recurse into any further impls that might exist for `target`
-            if let Some(target_did) = target.def_id(c) {
-                if let Some(target_impls) = c.impls.get(&target_did) {
-                    if let Some(target_deref_impl) = target_impls.iter().find(|i| {
-                        i.inner_impl()
-                            .trait_
-                            .as_ref()
-                            .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
-                            .unwrap_or(false)
-                    }) {
-                        sidebar_deref_methods(
-                            cx,
-                            out,
-                            target_deref_impl,
-                            target_impls,
-                            derefs,
-                            used_links,
-                        );
-                    }
-                }
-            }
+            if let Some(target_did) = target.def_id(c) &&
+                let Some(target_impls) = c.impls.get(&target_did) &&
+                let Some(target_deref_impl) = target_impls.iter().find(|i| {
+                    i.inner_impl()
+                        .trait_
+                        .as_ref()
+                        .map(|t| Some(t.def_id()) == cx.tcx().lang_items().deref_trait())
+                        .unwrap_or(false)
+                })
+            {
+                sidebar_deref_methods(
+                    cx,
+                    out,
+                    target_deref_impl,
+                    target_impls,
+                    derefs,
+                    used_links,
+                );
+            }
         }
     }
@@ -80,12 +80,11 @@ impl<'tcx> JsonRenderer<'tcx> {
                 // document primitive items in an arbitrary crate by using
                 // `doc(primitive)`.
                 let mut is_primitive_impl = false;
-                if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind {
-                    if impl_.trait_.is_none() {
-                        if let clean::types::Type::Primitive(_) = impl_.for_ {
-                            is_primitive_impl = true;
-                        }
-                    }
-                }
+                if let clean::types::ItemKind::ImplItem(ref impl_) = *item.kind &&
+                    impl_.trait_.is_none() &&
+                    let clean::types::Type::Primitive(_) = impl_.for_
+                {
+                    is_primitive_impl = true;
+                }

                 if item.item_id.is_local() || is_primitive_impl {
@@ -82,18 +82,17 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
    let def_id = item.item_id.expect_def_id().expect_local();

    // check if parent is trait impl
-    if let Some(parent_def_id) = cx.tcx.opt_local_parent(def_id) {
-        if let Some(parent_node) = cx.tcx.hir().find_by_def_id(parent_def_id) {
-            if matches!(
+    if let Some(parent_def_id) = cx.tcx.opt_local_parent(def_id) &&
+        let Some(parent_node) = cx.tcx.hir().find_by_def_id(parent_def_id) &&
+        matches!(
            parent_node,
            hir::Node::Item(hir::Item {
                kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }),
                ..
            })
-            ) {
-                return false;
-            }
-        }
-    }
+        )
+    {
+        return false;
+    }

    if cx.tcx.is_doc_hidden(def_id.to_def_id())
@@ -156,39 +156,38 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->

     // scan through included items ahead of time to splice in Deref targets to the "valid" sets
     for it in new_items_external.iter().chain(new_items_local.iter()) {
-        if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind {
-            if trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait()
-                && cleaner.keep_impl(for_, true)
+        if let ImplItem(box Impl { ref for_, ref trait_, ref items, .. }) = *it.kind &&
+            trait_.as_ref().map(|t| t.def_id()) == cx.tcx.lang_items().deref_trait() &&
+            cleaner.keep_impl(for_, true)
         {
             let target = items
                 .iter()
                 .find_map(|item| match *item.kind {
                     AssocTypeItem(ref t, _) => Some(&t.type_),
                     _ => None,
                 })
                 .expect("Deref impl without Target type");

             if let Some(prim) = target.primitive_type() {
                 cleaner.prims.insert(prim);
             } else if let Some(did) = target.def_id(&cx.cache) {
                 cleaner.items.insert(did.into());
             }
             if let Some(for_did) = for_.def_id(&cx.cache) {
                 if type_did_to_deref_target.insert(for_did, target).is_none() {
                     // Since only the `DefId` portion of the `Type` instances is known to be same for both the
                     // `Deref` target type and the impl for type positions, this map of types is keyed by
                     // `DefId` and for convenience uses a special cleaner that accepts `DefId`s directly.
                     if cleaner.keep_impl_with_def_id(for_did.into()) {
                         let mut targets = DefIdSet::default();
                         targets.insert(for_did);
                         add_deref_target(
                             cx,
                             &type_did_to_deref_target,
                             &mut cleaner,
                             &mut targets,
                             for_did,
                         );
                     }
                 }
             }
-            }
         }
@@ -210,11 +210,9 @@ fn extract_path_backwards(text: &str, end_pos: usize) -> Option<usize> {
            .take_while(|(_, c)| is_id_start(*c) || is_id_continue(*c))
            .reduce(|_accum, item| item)
            .and_then(|(new_pos, c)| is_id_start(c).then_some(new_pos));
-        if let Some(new_pos) = new_pos {
-            if current_pos != new_pos {
-                current_pos = new_pos;
-                continue;
-            }
-        }
+        if let Some(new_pos) = new_pos && current_pos != new_pos {
+            current_pos = new_pos;
+            continue;
+        }
         break;
     }
@@ -201,27 +201,25 @@ impl<'a> DocFolder for ImplStripper<'a, '_> {
             // Because we don't inline in `maybe_inline_local` if the output format is JSON,
             // we need to make a special check for JSON output: we want to keep it unless it has
             // a `#[doc(hidden)]` attribute if the `for_` type is exported.
-            if let Some(did) = imp.for_.def_id(self.cache) {
-                if !imp.for_.is_assoc_ty() && !self.should_keep_impl(&i, did) {
-                    debug!("ImplStripper: impl item for stripped type; removing");
-                    return None;
-                }
-            }
-            if let Some(did) = imp.trait_.as_ref().map(|t| t.def_id()) {
-                if !self.should_keep_impl(&i, did) {
-                    debug!("ImplStripper: impl item for stripped trait; removing");
-                    return None;
-                }
-            }
+            if let Some(did) = imp.for_.def_id(self.cache) &&
+                !imp.for_.is_assoc_ty() && !self.should_keep_impl(&i, did)
+            {
+                debug!("ImplStripper: impl item for stripped type; removing");
+                return None;
+            }
+            if let Some(did) = imp.trait_.as_ref().map(|t| t.def_id()) &&
+                !self.should_keep_impl(&i, did) {
+                debug!("ImplStripper: impl item for stripped trait; removing");
+                return None;
+            }
             if let Some(generics) = imp.trait_.as_ref().and_then(|t| t.generics()) {
                 for typaram in generics {
-                    if let Some(did) = typaram.def_id(self.cache) {
-                        if !self.should_keep_impl(&i, did) {
-                            debug!(
-                                "ImplStripper: stripped item in trait's generics; removing impl"
-                            );
-                            return None;
-                        }
-                    }
+                    if let Some(did) = typaram.def_id(self.cache) && !self.should_keep_impl(&i, did)
+                    {
+                        debug!(
+                            "ImplStripper: stripped item in trait's generics; removing impl"
+                        );
+                        return None;
+                    }
                 }
             }