Auto merge of #97365 - klensy:rustdoc-vs-clippy, r=notriddle
rustdoc: fix a few clippy lints. The second commit addresses the perf-related lints; the first addresses the others.
Commit 6ac8adad1f
22 changed files with 104 additions and 105 deletions
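
The changes below follow a handful of recurring clippy suggestions: dropping needless `ref` patterns, `&` borrows, and `mut` bindings; replacing `len() == 0` with `is_empty()`; and replacing `unwrap_or_else(Vec::new)` / `unwrap_or_else(String::new)` with `unwrap_or_default()`. A minimal, self-contained sketch of those patterns (generic example code, not taken from this commit):

// Standalone illustration (not rustdoc code) of the clippy patterns fixed here.

fn print_len(v: &[i32]) {
    println!("{} item(s)", v.len());
}

fn main() {
    let maybe: Option<Vec<i32>> = None;

    // Instead of `maybe.unwrap_or_else(Vec::new)`, clippy prefers:
    let items: Vec<i32> = maybe.unwrap_or_default();

    // Instead of `items.len() == 0` (clippy::len_zero), prefer:
    if items.is_empty() {
        println!("no items");
    }

    // Instead of `print_len(&slice)` on something that is already a
    // reference (clippy::needless_borrow), pass it directly:
    let slice: &[i32] = &items;
    print_len(slice);
}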
@@ -643,11 +643,11 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
     /// both for visual consistency between 'rustdoc' runs, and to
     /// make writing tests much easier
     #[inline]
-    fn sort_where_predicates(&self, mut predicates: &mut Vec<WherePredicate>) {
+    fn sort_where_predicates(&self, predicates: &mut Vec<WherePredicate>) {
         // We should never have identical bounds - and if we do,
         // they're visually identical as well. Therefore, using
         // an unstable sort is fine.
-        self.unstable_debug_sort(&mut predicates);
+        self.unstable_debug_sort(predicates);
     }

     /// Ensure that the bounds are in a consistent order. The precise
@@ -656,11 +656,11 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
     /// both for visual consistency between 'rustdoc' runs, and to
     /// make writing tests much easier
     #[inline]
-    fn sort_where_bounds(&self, mut bounds: &mut Vec<GenericBound>) {
+    fn sort_where_bounds(&self, bounds: &mut Vec<GenericBound>) {
         // We should never have identical bounds - and if we do,
         // they're visually identical as well. Therefore, using
         // an unstable sort is fine.
-        self.unstable_debug_sort(&mut bounds);
+        self.unstable_debug_sort(bounds);
     }

     /// This might look horrendously hacky, but it's actually not that bad.
@@ -248,7 +248,7 @@ impl<'tcx> Clean<'tcx, Option<WherePredicate>> for hir::WherePredicate<'tcx> {
             hir::WherePredicate::BoundPredicate(ref wbp) => {
                 let bound_params = wbp
                     .bound_generic_params
-                    .into_iter()
+                    .iter()
                     .map(|param| {
                         // Higher-ranked params must be lifetimes.
                         // Higher-ranked lifetimes can't have bounds.
@@ -525,7 +525,7 @@ fn clean_generic_param<'tcx>(
                 },
             )
         }
-        hir::GenericParamKind::Const { ref ty, default } => (
+        hir::GenericParamKind::Const { ty, default } => (
             param.name.ident().name,
             GenericParamDefKind::Const {
                 did: cx.tcx.hir().local_def_id(param.hir_id).to_def_id(),
@@ -947,7 +947,7 @@ fn clean_fn_decl_from_did_and_sig<'tcx>(
     // We assume all empty tuples are default return type. This theoretically can discard `-> ()`,
     // but shouldn't change any code meaning.
     let output = match sig.skip_binder().output().clean(cx) {
-        Type::Tuple(inner) if inner.len() == 0 => DefaultReturn,
+        Type::Tuple(inner) if inner.is_empty() => DefaultReturn,
         ty => Return(ty),
     };

@@ -972,7 +972,7 @@ fn clean_fn_decl_from_did_and_sig<'tcx>(
 impl<'tcx> Clean<'tcx, FnRetTy> for hir::FnRetTy<'tcx> {
     fn clean(&self, cx: &mut DocContext<'tcx>) -> FnRetTy {
         match *self {
-            Self::Return(ref typ) => Return(typ.clean(cx)),
+            Self::Return(typ) => Return(typ.clean(cx)),
             Self::DefaultReturn(..) => DefaultReturn,
         }
     }
@@ -1013,13 +1013,13 @@ impl<'tcx> Clean<'tcx, Item> for hir::TraitItem<'tcx> {
         let local_did = self.def_id.to_def_id();
         cx.with_param_env(local_did, |cx| {
             let inner = match self.kind {
-                hir::TraitItemKind::Const(ref ty, Some(default)) => AssocConstItem(
+                hir::TraitItemKind::Const(ty, Some(default)) => AssocConstItem(
                     ty.clean(cx),
                     ConstantKind::Local { def_id: local_did, body: default },
                 ),
-                hir::TraitItemKind::Const(ref ty, None) => TyAssocConstItem(ty.clean(cx)),
+                hir::TraitItemKind::Const(ty, None) => TyAssocConstItem(ty.clean(cx)),
                 hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => {
-                    let m = clean_function(cx, sig, &self.generics, body);
+                    let m = clean_function(cx, sig, self.generics, body);
                     MethodItem(m, None)
                 }
                 hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(names)) => {
@@ -1060,16 +1060,16 @@ impl<'tcx> Clean<'tcx, Item> for hir::ImplItem<'tcx> {
         let local_did = self.def_id.to_def_id();
         cx.with_param_env(local_did, |cx| {
             let inner = match self.kind {
-                hir::ImplItemKind::Const(ref ty, expr) => {
+                hir::ImplItemKind::Const(ty, expr) => {
                     let default = ConstantKind::Local { def_id: local_did, body: expr };
                     AssocConstItem(ty.clean(cx), default)
                 }
                 hir::ImplItemKind::Fn(ref sig, body) => {
-                    let m = clean_function(cx, sig, &self.generics, body);
+                    let m = clean_function(cx, sig, self.generics, body);
                     let defaultness = cx.tcx.associated_item(self.def_id).defaultness;
                     MethodItem(m, Some(defaultness))
                 }
-                hir::ImplItemKind::TyAlias(ref hir_ty) => {
+                hir::ImplItemKind::TyAlias(hir_ty) => {
                     let type_ = hir_ty.clean(cx);
                     let generics = self.generics.clean(cx);
                     let item_type = hir_ty_to_ty(cx.tcx, hir_ty).clean(cx);
@@ -1292,7 +1292,7 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
     let hir::TyKind::Path(qpath) = kind else { unreachable!() };

     match qpath {
-        hir::QPath::Resolved(None, ref path) => {
+        hir::QPath::Resolved(None, path) => {
             if let Res::Def(DefKind::TyParam, did) = path.res {
                 if let Some(new_ty) = cx.substs.get(&did).and_then(|p| p.as_ty()).cloned() {
                     return new_ty;
@@ -1309,7 +1309,7 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
                 resolve_type(cx, path)
             }
         }
-        hir::QPath::Resolved(Some(ref qself), p) => {
+        hir::QPath::Resolved(Some(qself), p) => {
             // Try to normalize `<X as Y>::T` to a type
             let ty = hir_ty_to_ty(cx.tcx, hir_ty);
             if let Some(normalized_value) = normalize(cx, ty) {
@@ -1333,7 +1333,7 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
                 trait_,
             }
         }
-        hir::QPath::TypeRelative(ref qself, segment) => {
+        hir::QPath::TypeRelative(qself, segment) => {
             let ty = hir_ty_to_ty(cx.tcx, hir_ty);
             let res = match ty.kind() {
                 ty::Projection(proj) => Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id),
@@ -1463,8 +1463,8 @@ impl<'tcx> Clean<'tcx, Type> for hir::Ty<'tcx> {
                 let lifetime = if elided { None } else { Some(l.clean(cx)) };
                 BorrowedRef { lifetime, mutability: m.mutbl, type_: box m.ty.clean(cx) }
             }
-            TyKind::Slice(ref ty) => Slice(box ty.clean(cx)),
-            TyKind::Array(ref ty, ref length) => {
+            TyKind::Slice(ty) => Slice(box ty.clean(cx)),
+            TyKind::Array(ty, ref length) => {
                 let length = match length {
                     hir::ArrayLen::Infer(_, _) => "_".to_string(),
                     hir::ArrayLen::Body(anon_const) => {
@@ -1499,7 +1499,7 @@ impl<'tcx> Clean<'tcx, Type> for hir::Ty<'tcx> {
                 let lifetime = if !lifetime.is_elided() { Some(lifetime.clean(cx)) } else { None };
                 DynTrait(bounds, lifetime)
             }
-            TyKind::BareFn(ref barefn) => BareFunction(box barefn.clean(cx)),
+            TyKind::BareFn(barefn) => BareFunction(box barefn.clean(cx)),
             // Rustdoc handles `TyKind::Err`s by turning them into `Type::Infer`s.
             TyKind::Infer | TyKind::Err => Infer,
             TyKind::Typeof(..) => panic!("unimplemented type {:?}", self.kind),
@@ -1908,7 +1908,7 @@ fn clean_maybe_renamed_item<'tcx>(
                 bounds: ty.bounds.iter().filter_map(|x| x.clean(cx)).collect(),
                 generics: ty.generics.clean(cx),
             }),
-            ItemKind::TyAlias(hir_ty, ref generics) => {
+            ItemKind::TyAlias(hir_ty, generics) => {
                 let rustdoc_ty = hir_ty.clean(cx);
                 let ty = hir_ty_to_ty(cx.tcx, hir_ty).clean(cx);
                 TypedefItem(Typedef {
@@ -1917,26 +1917,26 @@ fn clean_maybe_renamed_item<'tcx>(
                     item_type: Some(ty),
                 })
             }
-            ItemKind::Enum(ref def, ref generics) => EnumItem(Enum {
+            ItemKind::Enum(ref def, generics) => EnumItem(Enum {
                 variants: def.variants.iter().map(|v| v.clean(cx)).collect(),
                 generics: generics.clean(cx),
             }),
-            ItemKind::TraitAlias(ref generics, bounds) => TraitAliasItem(TraitAlias {
+            ItemKind::TraitAlias(generics, bounds) => TraitAliasItem(TraitAlias {
                 generics: generics.clean(cx),
                 bounds: bounds.iter().filter_map(|x| x.clean(cx)).collect(),
             }),
-            ItemKind::Union(ref variant_data, ref generics) => UnionItem(Union {
+            ItemKind::Union(ref variant_data, generics) => UnionItem(Union {
                 generics: generics.clean(cx),
                 fields: variant_data.fields().iter().map(|x| x.clean(cx)).collect(),
             }),
-            ItemKind::Struct(ref variant_data, ref generics) => StructItem(Struct {
+            ItemKind::Struct(ref variant_data, generics) => StructItem(Struct {
                 struct_type: CtorKind::from_hir(variant_data),
                 generics: generics.clean(cx),
                 fields: variant_data.fields().iter().map(|x| x.clean(cx)).collect(),
             }),
-            ItemKind::Impl(ref impl_) => return clean_impl(impl_, item.hir_id(), cx),
+            ItemKind::Impl(impl_) => return clean_impl(impl_, item.hir_id(), cx),
             // proc macros can have a name set by attributes
-            ItemKind::Fn(ref sig, ref generics, body_id) => {
+            ItemKind::Fn(ref sig, generics, body_id) => {
                 clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
             }
             ItemKind::Macro(ref macro_def, _) => {
@@ -1945,7 +1945,7 @@ fn clean_maybe_renamed_item<'tcx>(
                     source: display_macro_source(cx, name, macro_def, def_id, ty_vis),
                 })
             }
-            ItemKind::Trait(is_auto, unsafety, ref generics, bounds, item_ids) => {
+            ItemKind::Trait(is_auto, unsafety, generics, bounds, item_ids) => {
                 let items =
                     item_ids.iter().map(|ti| cx.tcx.hir().trait_item(ti.id).clean(cx)).collect();
                 TraitItem(Trait {
@@ -2192,7 +2192,7 @@ fn clean_maybe_renamed_foreign_item<'tcx>(
     let def_id = item.def_id.to_def_id();
     cx.with_param_env(def_id, |cx| {
         let kind = match item.kind {
-            hir::ForeignItemKind::Fn(decl, names, ref generics) => {
+            hir::ForeignItemKind::Fn(decl, names, generics) => {
                 let (generics, decl) = enter_impl_trait(cx, |cx| {
                     // NOTE: generics must be cleaned before args
                     let generics = generics.clean(cx);
@@ -2202,7 +2202,7 @@ fn clean_maybe_renamed_foreign_item<'tcx>(
                 });
                 ForeignFunctionItem(Function { decl, generics })
             }
-            hir::ForeignItemKind::Static(ref ty, mutability) => {
+            hir::ForeignItemKind::Static(ty, mutability) => {
                 ForeignStaticItem(Static { type_: ty.clean(cx), mutability, expr: None })
             }
             hir::ForeignItemKind::Type => ForeignTypeItem,
@@ -2232,7 +2232,7 @@ impl<'tcx> Clean<'tcx, TypeBindingKind> for hir::TypeBindingKind<'tcx> {
             hir::TypeBindingKind::Equality { ref term } => {
                 TypeBindingKind::Equality { term: term.clean(cx) }
             }
-            hir::TypeBindingKind::Constraint { ref bounds } => TypeBindingKind::Constraint {
+            hir::TypeBindingKind::Constraint { bounds } => TypeBindingKind::Constraint {
                 bounds: bounds.iter().filter_map(|b| b.clean(cx)).collect(),
             },
         }
@@ -171,7 +171,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
         if state != Start && needs_space {
             printer.space();
         }
-        print_tt(printer, &tt);
+        print_tt(printer, tt);
         state = next_state;
     }
 }
@@ -880,7 +880,7 @@ impl AttributesExt for [ast::Attribute] {
             let mut doc_cfg = self
                 .iter()
                 .filter(|attr| attr.has_name(sym::doc))
-                .flat_map(|attr| attr.meta_item_list().unwrap_or_else(Vec::new))
+                .flat_map(|attr| attr.meta_item_list().unwrap_or_default())
                 .filter(|attr| attr.has_name(sym::cfg))
                 .peekable();
             if doc_cfg.peek().is_some() && doc_cfg_active {
@@ -1011,7 +1011,7 @@ pub(crate) enum DocFragmentKind {
 fn add_doc_fragment(out: &mut String, frag: &DocFragment) {
     let s = frag.doc.as_str();
     let mut iter = s.lines();
-    if s == "" {
+    if s.is_empty() {
         out.push('\n');
         return;
     }
@@ -1594,17 +1594,17 @@ impl Type {
         match (self, other) {
             // Recursive cases.
             (Type::Tuple(a), Type::Tuple(b)) => {
-                a.len() == b.len() && a.iter().zip(b).all(|(a, b)| a.is_same(&b, cache))
+                a.len() == b.len() && a.iter().zip(b).all(|(a, b)| a.is_same(b, cache))
             }
-            (Type::Slice(a), Type::Slice(b)) => a.is_same(&b, cache),
-            (Type::Array(a, al), Type::Array(b, bl)) => al == bl && a.is_same(&b, cache),
+            (Type::Slice(a), Type::Slice(b)) => a.is_same(b, cache),
+            (Type::Array(a, al), Type::Array(b, bl)) => al == bl && a.is_same(b, cache),
             (Type::RawPointer(mutability, type_), Type::RawPointer(b_mutability, b_type_)) => {
-                mutability == b_mutability && type_.is_same(&b_type_, cache)
+                mutability == b_mutability && type_.is_same(b_type_, cache)
             }
             (
                 Type::BorrowedRef { mutability, type_, .. },
                 Type::BorrowedRef { mutability: b_mutability, type_: b_type_, .. },
-            ) => mutability == b_mutability && type_.is_same(&b_type_, cache),
+            ) => mutability == b_mutability && type_.is_same(b_type_, cache),
             // Placeholders and generics are equal to all other types.
             (Type::Infer, _) | (_, Type::Infer) => true,
             (Type::Generic(_), _) | (_, Type::Generic(_)) => true,
@@ -1667,7 +1667,7 @@ impl Type {

     pub(crate) fn projection(&self) -> Option<(&Type, DefId, PathSegment)> {
         if let QPath { self_type, trait_, assoc, .. } = self {
-            Some((&self_type, trait_.def_id(), *assoc.clone()))
+            Some((self_type, trait_.def_id(), *assoc.clone()))
         } else {
             None
         }
@@ -106,7 +106,7 @@ fn external_generic_args<'tcx>(
     bindings: Vec<TypeBinding>,
     substs: SubstsRef<'tcx>,
 ) -> GenericArgs {
-    let args = substs_to_args(cx, &substs, has_self);
+    let args = substs_to_args(cx, substs, has_self);

     if cx.tcx.fn_trait_kind_from_lang_item(did).is_some() {
         let inputs =
@@ -667,7 +667,7 @@ impl Options {
             return Err(1);
         }

-        let scrape_examples_options = ScrapeExamplesOptions::new(&matches, &diag)?;
+        let scrape_examples_options = ScrapeExamplesOptions::new(matches, &diag)?;
         let with_examples = matches.opt_strs("with-examples");
         let call_locations = crate::scrape_examples::load_call_locations(with_examples, &diag)?;

@@ -228,7 +228,7 @@ fn scrape_test_config(attrs: &[ast::Attribute]) -> GlobalTestOptions {
     let test_attrs: Vec<_> = attrs
         .iter()
         .filter(|a| a.has_name(sym::doc))
-        .flat_map(|a| a.meta_item_list().unwrap_or_else(Vec::new))
+        .flat_map(|a| a.meta_item_list().unwrap_or_default())
         .filter(|a| a.has_name(sym::test))
         .collect();
     let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[]));
@@ -738,7 +738,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
             }
         };
         // If a parsing error happened, it's very likely that the attribute is incomplete.
-        if !parser.parse_attribute(InnerAttrPolicy::Permitted).is_ok() {
+        if parser.parse_attribute(InnerAttrPolicy::Permitted).is_err() {
             return false;
         }
         // We now check if there is an unclosed delimiter for the attribute. To do so, we look at
@@ -456,7 +456,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
                     let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).kind() &&
                     adt.is_fundamental() {
                     for ty in generics {
-                        if let Some(did) = ty.def_id(&self.cache) {
+                        if let Some(did) = ty.def_id(self.cache) {
                             dids.insert(did);
                         }
                     }
@@ -5,6 +5,7 @@
 //! assume that HTML output is desired, although it may be possible to redesign
 //! them in the future to instead emit any format desired.

+use std::borrow::Cow;
 use std::cell::Cell;
 use std::fmt;
 use std::iter;
@@ -545,10 +546,10 @@ pub(crate) enum HrefError {
 // Panics if `syms` is empty.
 pub(crate) fn join_with_double_colon(syms: &[Symbol]) -> String {
     let mut s = String::with_capacity(estimate_item_path_byte_length(syms.len()));
-    s.push_str(&syms[0].as_str());
+    s.push_str(syms[0].as_str());
     for sym in &syms[1..] {
         s.push_str("::");
-        s.push_str(&sym.as_str());
+        s.push_str(sym.as_str());
     }
     s
 }
@@ -1069,7 +1070,7 @@ impl clean::Impl {
             write!(f, " for ")?;
         }

-        if let Some(ref ty) = self.kind.as_blanket_ty() {
+        if let Some(ty) = self.kind.as_blanket_ty() {
             fmt_type(ty, f, use_absolute, cx)?;
         } else {
             fmt_type(&self.for_, f, use_absolute, cx)?;
@@ -1295,9 +1296,11 @@ impl clean::Visibility {
         item_did: ItemId,
         cx: &'a Context<'tcx>,
     ) -> impl fmt::Display + 'a + Captures<'tcx> {
-        let to_print = match self {
-            clean::Public => "pub ".to_owned(),
-            clean::Inherited => String::new(),
+        use std::fmt::Write as _;
+
+        let to_print: Cow<'static, str> = match self {
+            clean::Public => "pub ".into(),
+            clean::Inherited => "".into(),
             clean::Visibility::Restricted(vis_did) => {
                 // FIXME(camelid): This may not work correctly if `item_did` is a module.
                 // However, rustdoc currently never displays a module's
@@ -1305,17 +1308,16 @@ impl clean::Visibility {
                 let parent_module = find_nearest_parent_module(cx.tcx(), item_did.expect_def_id());

                 if vis_did.is_crate_root() {
-                    "pub(crate) ".to_owned()
+                    "pub(crate) ".into()
                 } else if parent_module == Some(vis_did) {
                     // `pub(in foo)` where `foo` is the parent module
                     // is the same as no visibility modifier
-                    String::new()
+                    "".into()
                 } else if parent_module
-                    .map(|parent| find_nearest_parent_module(cx.tcx(), parent))
-                    .flatten()
+                    .and_then(|parent| find_nearest_parent_module(cx.tcx(), parent))
                     == Some(vis_did)
                 {
-                    "pub(super) ".to_owned()
+                    "pub(super) ".into()
                 } else {
                     let path = cx.tcx().def_path(vis_did);
                     debug!("path={:?}", path);
@@ -1325,14 +1327,14 @@ impl clean::Visibility {

                     let mut s = "pub(in ".to_owned();
                     for seg in &path.data[..path.data.len() - 1] {
-                        s.push_str(&format!("{}::", seg.data.get_opt_name().unwrap()));
+                        let _ = write!(s, "{}::", seg.data.get_opt_name().unwrap());
                     }
-                    s.push_str(&format!("{}) ", anchor));
-                    s
+                    let _ = write!(s, "{}) ", anchor);
+                    s.into()
                 }
             }
         };
-        display_fn(move |f| f.write_str(&to_print))
+        display_fn(move |f| write!(f, "{}", to_print))
     }

     /// This function is the same as print_with_space, except that it renders no links.
@@ -1358,9 +1360,7 @@ impl clean::Visibility {
                 // `pub(in foo)` where `foo` is the parent module
                 // is the same as no visibility modifier
                 String::new()
-            } else if parent_module
-                .map(|parent| find_nearest_parent_module(tcx, parent))
-                .flatten()
+            } else if parent_module.and_then(|parent| find_nearest_parent_module(tcx, parent))
                 == Some(vis_did)
             {
                 "pub(super) ".to_owned()
@@ -1023,7 +1023,7 @@ impl Markdown<'_> {
         let Markdown {
             content: md,
             links,
-            mut ids,
+            ids,
             error_codes: codes,
             edition,
             playground,
@@ -1046,7 +1046,7 @@ impl Markdown<'_> {

         let mut s = String::with_capacity(md.len() * 3 / 2);

-        let p = HeadingLinks::new(p, None, &mut ids, heading_offset);
+        let p = HeadingLinks::new(p, None, ids, heading_offset);
         let p = Footnotes::new(p);
         let p = LinkReplacer::new(p.map(|(ev, _)| ev), links);
         let p = TableWrapper::new(p);
@@ -1059,7 +1059,7 @@ impl Markdown<'_> {

 impl MarkdownWithToc<'_> {
     pub(crate) fn into_string(self) -> String {
-        let MarkdownWithToc(md, mut ids, codes, edition, playground) = self;
+        let MarkdownWithToc(md, ids, codes, edition, playground) = self;

         let p = Parser::new_ext(md, main_body_opts()).into_offset_iter();

@@ -1068,7 +1068,7 @@ impl MarkdownWithToc<'_> {
         let mut toc = TocBuilder::new();

         {
-            let p = HeadingLinks::new(p, Some(&mut toc), &mut ids, HeadingOffset::H1);
+            let p = HeadingLinks::new(p, Some(&mut toc), ids, HeadingOffset::H1);
             let p = Footnotes::new(p);
             let p = TableWrapper::new(p.map(|(ev, _)| ev));
             let p = CodeBlocks::new(p, codes, edition, playground);
@@ -1081,7 +1081,7 @@ impl MarkdownWithToc<'_> {

 impl MarkdownHtml<'_> {
     pub(crate) fn into_string(self) -> String {
-        let MarkdownHtml(md, mut ids, codes, edition, playground) = self;
+        let MarkdownHtml(md, ids, codes, edition, playground) = self;

         // This is actually common enough to special-case
         if md.is_empty() {
@@ -1097,7 +1097,7 @@ impl MarkdownHtml<'_> {

         let mut s = String::with_capacity(md.len() * 3 / 2);

-        let p = HeadingLinks::new(p, None, &mut ids, HeadingOffset::H1);
+        let p = HeadingLinks::new(p, None, ids, HeadingOffset::H1);
         let p = Footnotes::new(p);
         let p = TableWrapper::new(p.map(|(ev, _)| ev));
         let p = CodeBlocks::new(p, codes, edition, playground);
@@ -232,18 +232,18 @@ impl<'tcx> Context<'tcx> {

         let mut path = String::new();
         for name in &names[..names.len() - 1] {
-            path.push_str(&name.as_str());
+            path.push_str(name.as_str());
             path.push('/');
         }
-        path.push_str(&item_path(ty, &names.last().unwrap().as_str()));
+        path.push_str(&item_path(ty, names.last().unwrap().as_str()));
         match self.shared.redirections {
             Some(ref redirections) => {
                 let mut current_path = String::new();
                 for name in &self.current {
-                    current_path.push_str(&name.as_str());
+                    current_path.push_str(name.as_str());
                     current_path.push('/');
                 }
-                current_path.push_str(&item_path(ty, &names.last().unwrap().as_str()));
+                current_path.push_str(&item_path(ty, names.last().unwrap().as_str()));
                 redirections.borrow_mut().insert(current_path, path);
             }
             None => return layout::redirect(&format!("{}{}", self.root_path(), path)),
@@ -840,7 +840,7 @@ fn render_stability_since_raw(
     let mut stability = String::new();

     if let Some(ver) = stable_version {
-        stability.push_str(&ver.as_str());
+        stability.push_str(ver.as_str());
         title.push_str(&format!("Stable since Rust version {}", ver));
     }

@@ -2299,7 +2299,7 @@ fn sidebar_trait(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, t: &clean
             buf,
             "foreign-impls",
             "Implementations on Foreign Types",
-            res.iter().map(|(name, id)| format!("<a href=\"#{}\">{}</a>", id, Escape(&name))),
+            res.iter().map(|(name, id)| format!("<a href=\"#{}\">{}</a>", id, Escape(name))),
         );
     }
 }
@@ -2537,6 +2537,8 @@ fn item_ty_to_section(ty: ItemType) -> ItemSection {
 }

 fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
+    use std::fmt::Write as _;
+
     let mut sidebar = String::new();

     let item_sections_in_use: FxHashSet<_> = items
@@ -2554,7 +2556,7 @@ fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
         .map(|it| item_ty_to_section(it.type_()))
         .collect();
     for &sec in ItemSection::ALL.iter().filter(|sec| item_sections_in_use.contains(sec)) {
-        sidebar.push_str(&format!("<li><a href=\"#{}\">{}</a></li>", sec.id(), sec.name()));
+        let _ = write!(sidebar, "<li><a href=\"#{}\">{}</a></li>", sec.id(), sec.name());
     }

     if !sidebar.is_empty() {
@@ -2798,7 +2800,7 @@ fn render_call_locations(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item) {
             hi - lo
         };

-        let mut locs = call_locations.into_iter().collect::<Vec<_>>();
+        let mut locs = call_locations.iter().collect::<Vec<_>>();
         locs.sort_by_key(sort_criterion);
         locs
     };
@@ -2842,7 +2844,7 @@ fn render_call_locations(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item) {
     if it.peek().is_some() {
         write!(w, r#"<div class="example-links">Additional examples can be found in:<br><ul>"#);
         it.for_each(|(_, call_data)| {
-            let (url, _) = link_to_loc(&call_data, &call_data.locations[0]);
+            let (url, _) = link_to_loc(call_data, &call_data.locations[0]);
             write!(
                 w,
                 r#"<li><a href="{url}">{name}</a></li>"#,
@@ -37,7 +37,7 @@ pub(crate) fn build_index<'tcx>(
                 desc,
                 parent: Some(did),
                 parent_idx: None,
-                search_type: get_function_type_for_search(item, tcx, &cache),
+                search_type: get_function_type_for_search(item, tcx, cache),
                 aliases: item.attrs.get_doc_aliases(),
             });
         }
@@ -182,8 +182,8 @@ pub(crate) fn build_index<'tcx>(
         })
         .expect("failed serde conversion")
         // All these `replace` calls are because we have to go through JS string for JSON content.
-        .replace(r#"\"#, r"\\")
-        .replace(r#"'"#, r"\'")
+        .replace('\\', r"\\")
+        .replace('\'', r"\'")
         // We need to escape double quotes for the JSON.
         .replace("\\\"", "\\\\\"")
     )
@@ -49,7 +49,7 @@ pub(crate) fn collect_spans_and_sources(
         if generate_link_to_definition {
             tcx.hir().walk_toplevel_module(&mut visitor);
         }
-        let sources = sources::collect_local_sources(tcx, src_root, &krate);
+        let sources = sources::collect_local_sources(tcx, src_root, krate);
         (sources, visitor.matches)
     } else {
         (Default::default(), Default::default())
@@ -163,15 +163,18 @@ impl TocBuilder {

 impl Toc {
     fn print_inner(&self, v: &mut String) {
+        use std::fmt::Write as _;
+
         v.push_str("<ul>");
         for entry in &self.entries {
             // recursively format this table of contents
-            v.push_str(&format!(
+            let _ = write!(
+                v,
                 "\n<li><a href=\"#{id}\">{num} {name}</a>",
                 id = entry.id,
                 num = entry.sec_number,
                 name = entry.name
-            ));
+            );
             entry.children.print_inner(&mut *v);
             v.push_str("</li>");
         }
@@ -265,7 +265,7 @@ impl<'a, 'b> DocVisitor for CoverageCalculator<'a, 'b> {
             self.items.entry(filename).or_default().count_item(
                 has_docs,
                 has_doc_example,
-                should_have_doc_example(self.ctx, &i),
+                should_have_doc_example(self.ctx, i),
                 should_have_docs,
             );
         }
@@ -160,7 +160,7 @@ impl<'a, 'tcx> DocVisitor for SyntaxChecker<'a, 'tcx> {
             sp,
         );
         for code_block in markdown::rust_code_blocks(dox, &extra) {
-            self.check_rust_syntax(&item, dox, code_block);
+            self.check_rust_syntax(item, dox, code_block);
         }
     }

@@ -35,9 +35,9 @@ pub(crate) fn check_doc_test_visibility(krate: Crate, cx: &mut DocContext<'_>) -

 impl<'a, 'tcx> DocVisitor for DocTestVisibilityLinter<'a, 'tcx> {
     fn visit_item(&mut self, item: &Item) {
-        let dox = item.attrs.collapsed_doc_value().unwrap_or_else(String::new);
+        let dox = item.attrs.collapsed_doc_value().unwrap_or_default();

-        look_for_tests(self.cx, &dox, &item);
+        look_for_tests(self.cx, &dox, item);

         self.visit_item_recur(item)
     }
@@ -494,7 +494,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
                 DefKind::AssocFn | DefKind::AssocConst | DefKind::AssocTy | DefKind::Variant,
                 def_id,
             ) => (Res::from_def_id(self.cx.tcx, self.cx.tcx.parent(def_id)), Some(def_id)),
-            _ => ((res, None)),
+            _ => (res, None),
         });
     } else if ns == MacroNS {
         return Err(UnresolvedPath {
@@ -636,10 +636,9 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
             Res::Primitive(prim) => {
                 self.resolve_primitive_associated_item(prim, ns, item_name).or_else(|| {
                     self.primitive_type_to_ty(prim)
-                        .map(|ty| {
+                        .and_then(|ty| {
                             resolve_associated_trait_item(ty, module_id, item_name, ns, self.cx)
                         })
-                        .flatten()
                         .map(|item| (root_res, item.def_id))
                 })
             }
@@ -903,7 +902,7 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
                 tmp_links.insert(doc.clone(), preprocessed_markdown_links(&doc));
             }
             for md_link in &tmp_links[&doc] {
-                let link = self.resolve_link(&item, &doc, parent_node, md_link);
+                let link = self.resolve_link(item, &doc, parent_node, md_link);
                 if let Some(link) = link {
                     self.cx.cache.intra_doc_links.entry(item.item_id).or_default().push(link);
                 }
@@ -1136,7 +1135,7 @@ impl LinkCollector<'_, '_> {
                     let kind = self.cx.tcx.def_kind(id);
                     self.verify_disambiguator(
                         path_str,
-                        &ori_link,
+                        ori_link,
                         kind,
                         id,
                         disambiguator,
@@ -1150,14 +1149,14 @@ impl LinkCollector<'_, '_> {
                         && item.item_id.is_local()
                         && !self.cx.tcx.features().intra_doc_pointers
                     {
-                        self.report_rawptr_assoc_feature_gate(dox, &ori_link, item);
+                        self.report_rawptr_assoc_feature_gate(dox, ori_link, item);
                     }
                 } else {
                     match disambiguator {
                         Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
                         Some(other) => {
                             self.report_disambiguator_mismatch(
-                                path_str, &ori_link, other, res, &diag_info,
+                                path_str, ori_link, other, res, &diag_info,
                             );
                             return None;
                         }
@@ -1180,7 +1179,7 @@ impl LinkCollector<'_, '_> {
                     };
                     self.verify_disambiguator(
                         path_str,
-                        &ori_link,
+                        ori_link,
                         kind_for_dis,
                         id_for_dis,
                         disambiguator,
@@ -1274,7 +1273,7 @@ impl LinkCollector<'_, '_> {
             }
             suggest_disambiguator(resolved, diag, path_str, &ori_link.link, sp);
         };
-        report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, &diag_info, callback);
+        report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, callback);
     }

     fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &MarkdownLink, item: &Item) {
@@ -1930,7 +1929,7 @@ fn anchor_failure(
     msg: &str,
     anchor_idx: usize,
 ) {
-    report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, &diag_info, |diag, sp| {
+    report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp| {
         if let Some(mut sp) = sp {
             if let Some((fragment_offset, _)) =
                 diag_info.ori_link.char_indices().filter(|(_, x)| *x == '#').nth(anchor_idx)
@@ -91,11 +91,7 @@ fn extract_path_backwards(text: &str, end_pos: usize) -> Option<usize> {
             }
             break;
         }
-        if current_pos == end_pos {
-            return None;
-        } else {
-            return Some(current_pos);
-        }
+        if current_pos == end_pos { None } else { Some(current_pos) }
 }

 fn extract_html_tag(
@@ -11,7 +11,6 @@ pub(crate) trait DocVisitor: Sized {
             StrippedItem(..) => unreachable!(),
             ModuleItem(i) => {
                 self.visit_mod(i);
-                return;
             }
             StructItem(i) => i.fields.iter().for_each(|x| self.visit_item(x)),
             UnionItem(i) => i.fields.iter().for_each(|x| self.visit_item(x)),
@@ -365,7 +365,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
                     om.items.push((item, renamed));
                 }
             }
-            hir::ItemKind::Impl(ref impl_) => {
+            hir::ItemKind::Impl(impl_) => {
                 // Don't duplicate impls when inlining or if it's implementing a trait, we'll pick
                 // them up regardless of where they're located.
                 if !self.inlining && impl_.of_trait.is_none() {
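
Several hunks above also replace `s.push_str(&format!(…))` with `let _ = write!(s, …)` after importing `std::fmt::Write`. A minimal sketch of that pattern (generic example, not rustdoc code):

// Writing into the String directly avoids building a temporary String on
// every iteration, which is what the `push_str(&format!(...))` form did.
use std::fmt::Write as _;

fn main() {
    let sections = ["structs", "enums", "traits"];

    let mut sidebar = String::new();
    for sec in sections {
        // Before: sidebar.push_str(&format!("<li>{}</li>", sec));
        // After: `write!` appends in place; writing to a String cannot fail,
        // so the Result is deliberately ignored.
        let _ = write!(sidebar, "<li>{}</li>", sec);
    }
    println!("{sidebar}");
}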