
Fix clippy lints in librustdoc

Guillaume Gomez 2021-10-01 17:12:39 +02:00
parent 56694b0453
commit 4614ca4541
27 changed files with 194 additions and 221 deletions

View file

@ -136,7 +136,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
let f = auto_trait::AutoTraitFinder::new(tcx); let f = auto_trait::AutoTraitFinder::new(tcx);
debug!("get_auto_trait_impls({:?})", ty); debug!("get_auto_trait_impls({:?})", ty);
let auto_traits: Vec<_> = self.cx.auto_traits.iter().cloned().collect(); let auto_traits: Vec<_> = self.cx.auto_traits.iter().copied().collect();
let mut auto_traits: Vec<Item> = auto_traits let mut auto_traits: Vec<Item> = auto_traits
.into_iter() .into_iter()
.filter_map(|trait_def_id| { .filter_map(|trait_def_id| {
@ -193,8 +193,8 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
// to its smaller and larger regions. Note that 'larger' regions correspond // to its smaller and larger regions. Note that 'larger' regions correspond
// to sub-regions in Rust code (e.g., in 'a: 'b, 'a is the larger region). // to sub-regions in Rust code (e.g., in 'a: 'b, 'a is the larger region).
for constraint in regions.constraints.keys() { for constraint in regions.constraints.keys() {
match constraint { match *constraint {
&Constraint::VarSubVar(r1, r2) => { Constraint::VarSubVar(r1, r2) => {
{ {
let deps1 = vid_map.entry(RegionTarget::RegionVid(r1)).or_default(); let deps1 = vid_map.entry(RegionTarget::RegionVid(r1)).or_default();
deps1.larger.insert(RegionTarget::RegionVid(r2)); deps1.larger.insert(RegionTarget::RegionVid(r2));
@ -203,15 +203,15 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
let deps2 = vid_map.entry(RegionTarget::RegionVid(r2)).or_default(); let deps2 = vid_map.entry(RegionTarget::RegionVid(r2)).or_default();
deps2.smaller.insert(RegionTarget::RegionVid(r1)); deps2.smaller.insert(RegionTarget::RegionVid(r1));
} }
&Constraint::RegSubVar(region, vid) => { Constraint::RegSubVar(region, vid) => {
let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default(); let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default();
deps.smaller.insert(RegionTarget::Region(region)); deps.smaller.insert(RegionTarget::Region(region));
} }
&Constraint::VarSubReg(vid, region) => { Constraint::VarSubReg(vid, region) => {
let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default(); let deps = vid_map.entry(RegionTarget::RegionVid(vid)).or_default();
deps.larger.insert(RegionTarget::Region(region)); deps.larger.insert(RegionTarget::Region(region));
} }
&Constraint::RegSubReg(r1, r2) => { Constraint::RegSubReg(r1, r2) => {
// The constraint is already in the form that we want, so we're done with it // The constraint is already in the form that we want, so we're done with it
// Desired order is 'larger, smaller', so flip then // Desired order is 'larger, smaller', so flip then
if region_name(r1) != region_name(r2) { if region_name(r1) != region_name(r2) {
@ -513,8 +513,8 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
// as we want to combine them with any 'Output' qpaths // as we want to combine them with any 'Output' qpaths
// later // later
let is_fn = match &mut b { let is_fn = match b {
&mut GenericBound::TraitBound(ref mut p, _) => { GenericBound::TraitBound(ref mut p, _) => {
// Insert regions into the for_generics hash map first, to ensure // Insert regions into the for_generics hash map first, to ensure
// that we don't end up with duplicate bounds (e.g., for<'b, 'b>) // that we don't end up with duplicate bounds (e.g., for<'b, 'b>)
for_generics.extend(p.generic_params.clone()); for_generics.extend(p.generic_params.clone());
@ -699,8 +699,8 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
} }
fn region_name(region: Region<'_>) -> Option<Symbol> { fn region_name(region: Region<'_>) -> Option<Symbol> {
match region { match *region {
&ty::ReEarlyBound(r) => Some(r.name), ty::ReEarlyBound(r) => Some(r.name),
_ => None, _ => None,
} }
} }
@ -717,8 +717,8 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> {
} }
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
(match r { (match *r {
&ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(), ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(),
_ => None, _ => None,
}) })
.unwrap_or_else(|| r.super_fold_with(self)) .unwrap_or_else(|| r.super_fold_with(self))
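The recurring fixes in this file are matching on the dereferenced value (`match *constraint`, `match *region`, `match *r`) instead of writing `&` patterns in every arm, and using `copied()` rather than `cloned()` for iterators over `Copy` items. A standalone sketch of both patterns, not part of the commit, using a made-up `Constraint` enum:

#[derive(Clone, Copy, Debug)]
enum Constraint {
    VarSubVar(u32, u32),
    RegSubVar(u32, u32),
}

fn describe(constraint: &Constraint) -> String {
    // Dereferencing once in the scrutinee (what clippy's match_ref_pats points at)
    // avoids repeating `&` in every arm; binding by value is fine because the
    // enum is `Copy`.
    match *constraint {
        Constraint::VarSubVar(a, b) => format!("var {} <= var {}", a, b),
        Constraint::RegSubVar(r, v) => format!("region {} <= var {}", r, v),
    }
}

fn main() {
    let vids = vec![1u32, 2, 3];
    // `copied()` says the same thing as `cloned()` but only compiles for `Copy`
    // items, which is what clippy's cloned_instead_of_copied lint suggests.
    let collected: Vec<u32> = vids.iter().copied().collect();
    println!("{:?}", collected);
    println!("{}", describe(&Constraint::VarSubVar(1, 2)));
    println!("{}", describe(&Constraint::RegSubVar(0, 3)));
}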

View file

@ -216,17 +216,15 @@ impl<'tcx> Clean<GenericBound> for ty::PolyTraitRef<'tcx> {
impl Clean<Lifetime> for hir::Lifetime { impl Clean<Lifetime> for hir::Lifetime {
fn clean(&self, cx: &mut DocContext<'_>) -> Lifetime { fn clean(&self, cx: &mut DocContext<'_>) -> Lifetime {
let def = cx.tcx.named_region(self.hir_id); let def = cx.tcx.named_region(self.hir_id);
match def { if let Some(
Some( rl::Region::EarlyBound(_, node_id, _)
rl::Region::EarlyBound(_, node_id, _) | rl::Region::LateBound(_, _, node_id, _)
| rl::Region::LateBound(_, _, node_id, _) | rl::Region::Free(_, node_id),
| rl::Region::Free(_, node_id), ) = def
) => { {
if let Some(lt) = cx.lt_substs.get(&node_id).cloned() { if let Some(lt) = cx.lt_substs.get(&node_id).cloned() {
return lt; return lt;
}
} }
_ => {}
} }
Lifetime(self.name.ident().name) Lifetime(self.name.ident().name)
} }
@ -828,7 +826,7 @@ impl<'a> Clean<Arguments> for (&'a [hir::Ty<'a>], hir::BodyId) {
.iter() .iter()
.enumerate() .enumerate()
.map(|(i, ty)| Argument { .map(|(i, ty)| Argument {
name: name_from_pat(&body.params[i].pat), name: name_from_pat(body.params[i].pat),
type_: ty.clean(cx), type_: ty.clean(cx),
}) })
.collect(), .collect(),
@ -924,7 +922,7 @@ impl Clean<Item> for hir::TraitItem<'_> {
} }
MethodItem(m, None) MethodItem(m, None)
} }
hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(ref names)) => { hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(names)) => {
let (generics, decl) = enter_impl_trait(cx, |cx| { let (generics, decl) = enter_impl_trait(cx, |cx| {
(self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx)) (self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx))
}); });
@ -936,7 +934,7 @@ impl Clean<Item> for hir::TraitItem<'_> {
} }
TyMethodItem(t) TyMethodItem(t)
} }
hir::TraitItemKind::Type(ref bounds, ref default) => { hir::TraitItemKind::Type(bounds, ref default) => {
AssocTypeItem(bounds.clean(cx), default.clean(cx)) AssocTypeItem(bounds.clean(cx), default.clean(cx))
} }
}; };
@ -1260,7 +1258,7 @@ fn clean_qpath(hir_ty: &hir::Ty<'_>, cx: &mut DocContext<'_>) -> Type {
let path = path.clean(cx); let path = path.clean(cx);
resolve_type(cx, path) resolve_type(cx, path)
} }
hir::QPath::Resolved(Some(ref qself), ref p) => { hir::QPath::Resolved(Some(ref qself), p) => {
// Try to normalize `<X as Y>::T` to a type // Try to normalize `<X as Y>::T` to a type
let ty = hir_ty_to_ty(cx.tcx, hir_ty); let ty = hir_ty_to_ty(cx.tcx, hir_ty);
if let Some(normalized_value) = normalize(cx, ty) { if let Some(normalized_value) = normalize(cx, ty) {
@ -1281,7 +1279,7 @@ fn clean_qpath(hir_ty: &hir::Ty<'_>, cx: &mut DocContext<'_>) -> Type {
trait_, trait_,
} }
} }
hir::QPath::TypeRelative(ref qself, ref segment) => { hir::QPath::TypeRelative(ref qself, segment) => {
let ty = hir_ty_to_ty(cx.tcx, hir_ty); let ty = hir_ty_to_ty(cx.tcx, hir_ty);
let res = match ty.kind() { let res = match ty.kind() {
ty::Projection(proj) => Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id), ty::Projection(proj) => Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id),
@ -1337,7 +1335,7 @@ impl Clean<Type> for hir::Ty<'_> {
let length = print_const(cx, ct.eval(cx.tcx, param_env)); let length = print_const(cx, ct.eval(cx.tcx, param_env));
Array(box ty.clean(cx), length) Array(box ty.clean(cx), length)
} }
TyKind::Tup(ref tys) => Tuple(tys.clean(cx)), TyKind::Tup(tys) => Tuple(tys.clean(cx)),
TyKind::OpaqueDef(item_id, _) => { TyKind::OpaqueDef(item_id, _) => {
let item = cx.tcx.hir().item(item_id); let item = cx.tcx.hir().item(item_id);
if let hir::ItemKind::OpaqueTy(ref ty) = item.kind { if let hir::ItemKind::OpaqueTy(ref ty) = item.kind {
@ -1346,8 +1344,8 @@ impl Clean<Type> for hir::Ty<'_> {
unreachable!() unreachable!()
} }
} }
TyKind::Path(_) => clean_qpath(&self, cx), TyKind::Path(_) => clean_qpath(self, cx),
TyKind::TraitObject(ref bounds, ref lifetime, _) => { TyKind::TraitObject(bounds, ref lifetime, _) => {
let bounds = bounds.iter().map(|bound| bound.clean(cx)).collect(); let bounds = bounds.iter().map(|bound| bound.clean(cx)).collect();
let lifetime = if !lifetime.is_elided() { Some(lifetime.clean(cx)) } else { None }; let lifetime = if !lifetime.is_elided() { Some(lifetime.clean(cx)) } else { None };
DynTrait(bounds, lifetime) DynTrait(bounds, lifetime)
@ -1441,7 +1439,7 @@ impl<'tcx> Clean<Type> for Ty<'tcx> {
let path = external_path(cx, did, false, vec![], InternalSubsts::empty()); let path = external_path(cx, did, false, vec![], InternalSubsts::empty());
ResolvedPath { path, did } ResolvedPath { path, did }
} }
ty::Dynamic(ref obj, ref reg) => { ty::Dynamic(obj, ref reg) => {
// HACK: pick the first `did` as the `did` of the trait object. Someone // HACK: pick the first `did` as the `did` of the trait object. Someone
// might want to implement "native" support for marker-trait-only // might want to implement "native" support for marker-trait-only
// trait objects. // trait objects.
@ -1481,9 +1479,7 @@ impl<'tcx> Clean<Type> for Ty<'tcx> {
DynTrait(bounds, lifetime) DynTrait(bounds, lifetime)
} }
ty::Tuple(ref t) => { ty::Tuple(t) => Tuple(t.iter().map(|t| t.expect_ty()).collect::<Vec<_>>().clean(cx)),
Tuple(t.iter().map(|t| t.expect_ty()).collect::<Vec<_>>().clean(cx))
}
ty::Projection(ref data) => data.clean(cx), ty::Projection(ref data) => data.clean(cx),
@ -1821,9 +1817,9 @@ impl Clean<Vec<Item>> for (&hir::Item<'_>, Option<Symbol>) {
clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx) clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
} }
ItemKind::Macro(ref macro_def) => MacroItem(Macro { ItemKind::Macro(ref macro_def) => MacroItem(Macro {
source: display_macro_source(cx, name, &macro_def, def_id, &item.vis), source: display_macro_source(cx, name, macro_def, def_id, &item.vis),
}), }),
ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref item_ids) => { ItemKind::Trait(is_auto, unsafety, ref generics, bounds, item_ids) => {
let items = item_ids let items = item_ids
.iter() .iter()
.map(|ti| cx.tcx.hir().trait_item(ti.id).clean(cx)) .map(|ti| cx.tcx.hir().trait_item(ti.id).clean(cx))
@ -2065,10 +2061,10 @@ impl Clean<Item> for (&hir::ForeignItem<'_>, Option<Symbol>) {
let def_id = item.def_id.to_def_id(); let def_id = item.def_id.to_def_id();
cx.with_param_env(def_id, |cx| { cx.with_param_env(def_id, |cx| {
let kind = match item.kind { let kind = match item.kind {
hir::ForeignItemKind::Fn(ref decl, ref names, ref generics) => { hir::ForeignItemKind::Fn(decl, names, ref generics) => {
let abi = cx.tcx.hir().get_foreign_abi(item.hir_id()); let abi = cx.tcx.hir().get_foreign_abi(item.hir_id());
let (generics, decl) = enter_impl_trait(cx, |cx| { let (generics, decl) = enter_impl_trait(cx, |cx| {
(generics.clean(cx), (&**decl, &names[..]).clean(cx)) (generics.clean(cx), (&*decl, &names[..]).clean(cx))
}); });
ForeignFunctionItem(Function { ForeignFunctionItem(Function {
decl, decl,
@ -2113,7 +2109,7 @@ impl Clean<TypeBindingKind> for hir::TypeBindingKind<'_> {
hir::TypeBindingKind::Equality { ref ty } => { hir::TypeBindingKind::Equality { ref ty } => {
TypeBindingKind::Equality { ty: ty.clean(cx) } TypeBindingKind::Equality { ty: ty.clean(cx) }
} }
hir::TypeBindingKind::Constraint { ref bounds } => { hir::TypeBindingKind::Constraint { bounds } => {
TypeBindingKind::Constraint { bounds: bounds.iter().map(|b| b.clean(cx)).collect() } TypeBindingKind::Constraint { bounds: bounds.iter().map(|b| b.clean(cx)).collect() }
} }
} }
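The `hir::Lifetime` change above collapses a `match` whose only interesting arm is a `Some(..)` or-pattern into an `if let`, the shape clippy's single_match lint points at. A small self-contained sketch of the same rewrite; the `Region` enum and substitution map here are made up for illustration:

use std::collections::HashMap;

#[derive(Clone, Copy)]
enum Region {
    EarlyBound(u32),
    LateBound(u32),
    Static,
}

fn resolve(def: Option<Region>, lt_substs: &HashMap<u32, String>) -> String {
    // Instead of `match def { Some(..) => { .. }, _ => {} }`, bind the two
    // interesting variants with one `if let` + or-pattern and fall through
    // to the default below.
    if let Some(Region::EarlyBound(id) | Region::LateBound(id)) = def {
        if let Some(lt) = lt_substs.get(&id).cloned() {
            return lt;
        }
    }
    "'_".to_string()
}

fn main() {
    let mut substs = HashMap::new();
    substs.insert(0, "'a".to_string());
    println!("{}", resolve(Some(Region::EarlyBound(0)), &substs));
    println!("{}", resolve(Some(Region::LateBound(1)), &substs));
    println!("{}", resolve(Some(Region::Static), &substs));
}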

View file

@ -204,7 +204,7 @@ impl ExternalCrate {
.filter_map(|a| a.value_str()) .filter_map(|a| a.value_str())
.map(to_remote) .map(to_remote)
.next() .next()
.or(extern_url.map(to_remote)) // NOTE: only matters if `extern_url_takes_precedence` is false .or_else(|| extern_url.map(to_remote)) // NOTE: only matters if `extern_url_takes_precedence` is false
.unwrap_or(Unknown) // Well, at least we tried. .unwrap_or(Unknown) // Well, at least we tried.
} }
@ -238,7 +238,7 @@ impl ExternalCrate {
hir::ItemKind::Mod(_) => { hir::ItemKind::Mod(_) => {
as_keyword(Res::Def(DefKind::Mod, id.def_id.to_def_id())) as_keyword(Res::Def(DefKind::Mod, id.def_id.to_def_id()))
} }
hir::ItemKind::Use(ref path, hir::UseKind::Single) hir::ItemKind::Use(path, hir::UseKind::Single)
if item.vis.node.is_pub() => if item.vis.node.is_pub() =>
{ {
as_keyword(path.res.expect_non_local()) as_keyword(path.res.expect_non_local())
@ -304,7 +304,7 @@ impl ExternalCrate {
hir::ItemKind::Mod(_) => { hir::ItemKind::Mod(_) => {
as_primitive(Res::Def(DefKind::Mod, id.def_id.to_def_id())) as_primitive(Res::Def(DefKind::Mod, id.def_id.to_def_id()))
} }
hir::ItemKind::Use(ref path, hir::UseKind::Single) hir::ItemKind::Use(path, hir::UseKind::Single)
if item.vis.node.is_pub() => if item.vis.node.is_pub() =>
{ {
as_primitive(path.res.expect_non_local()).map(|(_, prim)| { as_primitive(path.res.expect_non_local()).map(|(_, prim)| {
@ -381,7 +381,7 @@ impl Item {
{ {
*span *span
} else { } else {
self.def_id.as_def_id().map(|did| rustc_span(did, tcx)).unwrap_or_else(|| Span::dummy()) self.def_id.as_def_id().map(|did| rustc_span(did, tcx)).unwrap_or_else(Span::dummy)
} }
} }
@ -562,7 +562,7 @@ impl Item {
} }
crate fn stability_class(&self, tcx: TyCtxt<'_>) -> Option<String> { crate fn stability_class(&self, tcx: TyCtxt<'_>) -> Option<String> {
self.stability(tcx).as_ref().and_then(|ref s| { self.stability(tcx).as_ref().and_then(|s| {
let mut classes = Vec::with_capacity(2); let mut classes = Vec::with_capacity(2);
if s.level.is_unstable() { if s.level.is_unstable() {
@ -820,9 +820,9 @@ impl AttributesExt for [ast::Attribute] {
// #[doc(cfg(...))] // #[doc(cfg(...))]
if let Some(cfg_mi) = item if let Some(cfg_mi) = item
.meta_item() .meta_item()
.and_then(|item| rustc_expand::config::parse_cfg(&item, sess)) .and_then(|item| rustc_expand::config::parse_cfg(item, sess))
{ {
match Cfg::parse(&cfg_mi) { match Cfg::parse(cfg_mi) {
Ok(new_cfg) => cfg &= new_cfg, Ok(new_cfg) => cfg &= new_cfg,
Err(e) => sess.span_err(e.span, e.msg), Err(e) => sess.span_err(e.span, e.msg),
} }
@ -934,7 +934,7 @@ impl<'a> FromIterator<&'a DocFragment> for String {
T: IntoIterator<Item = &'a DocFragment>, T: IntoIterator<Item = &'a DocFragment>,
{ {
iter.into_iter().fold(String::new(), |mut acc, frag| { iter.into_iter().fold(String::new(), |mut acc, frag| {
add_doc_fragment(&mut acc, &frag); add_doc_fragment(&mut acc, frag);
acc acc
}) })
} }
@ -1061,12 +1061,12 @@ impl Attributes {
let ori = iter.next()?; let ori = iter.next()?;
let mut out = String::new(); let mut out = String::new();
add_doc_fragment(&mut out, &ori); add_doc_fragment(&mut out, ori);
while let Some(new_frag) = iter.next() { for new_frag in iter {
if new_frag.kind != ori.kind || new_frag.parent_module != ori.parent_module { if new_frag.kind != ori.kind || new_frag.parent_module != ori.parent_module {
break; break;
} }
add_doc_fragment(&mut out, &new_frag); add_doc_fragment(&mut out, new_frag);
} }
if out.is_empty() { None } else { Some(out) } if out.is_empty() { None } else { Some(out) }
} }
@ -1079,7 +1079,7 @@ impl Attributes {
for new_frag in self.doc_strings.iter() { for new_frag in self.doc_strings.iter() {
let out = ret.entry(new_frag.parent_module).or_default(); let out = ret.entry(new_frag.parent_module).or_default();
add_doc_fragment(out, &new_frag); add_doc_fragment(out, new_frag);
} }
ret ret
} }
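Two of the fixes in this file are easy to miss in the noise: `.or(extern_url.map(to_remote))` becomes `.or_else(|| ...)` so the fallback is only computed when it is needed, and `while let Some(new_frag) = iter.next()` becomes a plain `for` loop. A standalone sketch of both, with invented helper names:

fn pick_url(local: Option<&str>, extern_url: Option<&str>) -> String {
    // `or_else` takes a closure, so the fallback is only built when `local`
    // is `None`; `.or(extern_url.map(..))` would evaluate its argument eagerly
    // on every call.
    local
        .map(|s| s.to_string())
        .or_else(|| extern_url.map(|u| format!("{}/", u.trim_end_matches('/'))))
        .unwrap_or_else(|| "unknown".to_string())
}

fn collapse_doc(fragments: &[&str]) -> String {
    let mut iter = fragments.iter();
    let mut out = String::new();
    if let Some(first) = iter.next() {
        out.push_str(first);
    }
    // Consuming the rest of a half-used iterator with `for` is clearer than
    // `while let Some(new_frag) = iter.next()`.
    for frag in iter {
        out.push('\n');
        out.push_str(frag);
    }
    out
}

fn main() {
    println!("{}", pick_url(None, Some("https://docs.rs")));
    println!("{}", collapse_doc(&["first fragment", "second fragment"]));
}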

View file

@ -171,8 +171,8 @@ crate fn strip_path_generics(path: Path) -> Path {
crate fn qpath_to_string(p: &hir::QPath<'_>) -> String { crate fn qpath_to_string(p: &hir::QPath<'_>) -> String {
let segments = match *p { let segments = match *p {
hir::QPath::Resolved(_, ref path) => &path.segments, hir::QPath::Resolved(_, path) => &path.segments,
hir::QPath::TypeRelative(_, ref segment) => return segment.ident.to_string(), hir::QPath::TypeRelative(_, segment) => return segment.ident.to_string(),
hir::QPath::LangItem(lang_item, ..) => return lang_item.name().to_string(), hir::QPath::LangItem(lang_item, ..) => return lang_item.name().to_string(),
}; };
@ -217,15 +217,15 @@ crate fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
PatKind::Wild | PatKind::Struct(..) => return kw::Underscore, PatKind::Wild | PatKind::Struct(..) => return kw::Underscore,
PatKind::Binding(_, _, ident, _) => return ident.name, PatKind::Binding(_, _, ident, _) => return ident.name,
PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p), PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
PatKind::Or(ref pats) => { PatKind::Or(pats) => {
pats.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(" | ") pats.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(" | ")
} }
PatKind::Tuple(ref elts, _) => format!( PatKind::Tuple(elts, _) => format!(
"({})", "({})",
elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ") elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ")
), ),
PatKind::Box(ref p) => return name_from_pat(&**p), PatKind::Box(p) => return name_from_pat(&*p),
PatKind::Ref(ref p, _) => return name_from_pat(&**p), PatKind::Ref(p, _) => return name_from_pat(&*p),
PatKind::Lit(..) => { PatKind::Lit(..) => {
warn!( warn!(
"tried to get argument name from PatKind::Lit, which is silly in function arguments" "tried to get argument name from PatKind::Lit, which is silly in function arguments"
@ -233,7 +233,7 @@ crate fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
return Symbol::intern("()"); return Symbol::intern("()");
} }
PatKind::Range(..) => return kw::Underscore, PatKind::Range(..) => return kw::Underscore,
PatKind::Slice(ref begin, ref mid, ref end) => { PatKind::Slice(begin, ref mid, end) => {
let begin = begin.iter().map(|p| name_from_pat(p).to_string()); let begin = begin.iter().map(|p| name_from_pat(p).to_string());
let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter(); let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
let end = end.iter().map(|p| name_from_pat(p).to_string()); let end = end.iter().map(|p| name_from_pat(p).to_string());
@ -507,7 +507,7 @@ crate fn has_doc_flag(attrs: ty::Attributes<'_>, flag: Symbol) -> bool {
/// so that the channel is consistent. /// so that the channel is consistent.
/// ///
/// Set by `bootstrap::Builder::doc_rust_lang_org_channel` in order to keep tests passing on beta/stable. /// Set by `bootstrap::Builder::doc_rust_lang_org_channel` in order to keep tests passing on beta/stable.
crate const DOC_RUST_LANG_ORG_CHANNEL: &'static str = env!("DOC_RUST_LANG_ORG_CHANNEL"); crate const DOC_RUST_LANG_ORG_CHANNEL: &str = env!("DOC_RUST_LANG_ORG_CHANNEL");
/// Render a sequence of macro arms in a format suitable for displaying to the user /// Render a sequence of macro arms in a format suitable for displaying to the user
/// as part of an item declaration. /// as part of an item declaration.

View file

@ -321,13 +321,13 @@ impl Options {
/// been printed, returns `Err` with the exit code. /// been printed, returns `Err` with the exit code.
crate fn from_matches(matches: &getopts::Matches) -> Result<Options, i32> { crate fn from_matches(matches: &getopts::Matches) -> Result<Options, i32> {
// Check for unstable options. // Check for unstable options.
nightly_options::check_nightly_options(&matches, &opts()); nightly_options::check_nightly_options(matches, &opts());
if matches.opt_present("h") || matches.opt_present("help") { if matches.opt_present("h") || matches.opt_present("help") {
crate::usage("rustdoc"); crate::usage("rustdoc");
return Err(0); return Err(0);
} else if matches.opt_present("version") { } else if matches.opt_present("version") {
rustc_driver::version("rustdoc", &matches); rustc_driver::version("rustdoc", matches);
return Err(0); return Err(0);
} }
@ -363,10 +363,10 @@ impl Options {
return Err(0); return Err(0);
} }
let color = config::parse_color(&matches); let color = config::parse_color(matches);
let config::JsonConfig { json_rendered, json_unused_externs, .. } = let config::JsonConfig { json_rendered, json_unused_externs, .. } =
config::parse_json(&matches); config::parse_json(matches);
let error_format = config::parse_error_format(&matches, color, json_rendered); let error_format = config::parse_error_format(matches, color, json_rendered);
let codegen_options = CodegenOptions::build(matches, error_format); let codegen_options = CodegenOptions::build(matches, error_format);
let debugging_opts = DebuggingOptions::build(matches, error_format); let debugging_opts = DebuggingOptions::build(matches, error_format);
@ -374,7 +374,7 @@ impl Options {
let diag = new_handler(error_format, None, &debugging_opts); let diag = new_handler(error_format, None, &debugging_opts);
// check for deprecated options // check for deprecated options
check_deprecated_options(&matches, &diag); check_deprecated_options(matches, &diag);
let mut emit = Vec::new(); let mut emit = Vec::new();
for list in matches.opt_strs("emit") { for list in matches.opt_strs("emit") {
@ -440,8 +440,8 @@ impl Options {
.iter() .iter()
.map(|s| SearchPath::from_cli_opt(s, error_format)) .map(|s| SearchPath::from_cli_opt(s, error_format))
.collect(); .collect();
let externs = parse_externs(&matches, &debugging_opts, error_format); let externs = parse_externs(matches, &debugging_opts, error_format);
let extern_html_root_urls = match parse_extern_html_roots(&matches) { let extern_html_root_urls = match parse_extern_html_roots(matches) {
Ok(ex) => ex, Ok(ex) => ex,
Err(err) => { Err(err) => {
diag.struct_err(err).emit(); diag.struct_err(err).emit();
@ -560,7 +560,7 @@ impl Options {
} }
} }
let edition = config::parse_crate_edition(&matches); let edition = config::parse_crate_edition(matches);
let mut id_map = html::markdown::IdMap::new(); let mut id_map = html::markdown::IdMap::new();
let external_html = match ExternalHtml::load( let external_html = match ExternalHtml::load(
@ -569,7 +569,7 @@ impl Options {
&matches.opt_strs("html-after-content"), &matches.opt_strs("html-after-content"),
&matches.opt_strs("markdown-before-content"), &matches.opt_strs("markdown-before-content"),
&matches.opt_strs("markdown-after-content"), &matches.opt_strs("markdown-after-content"),
nightly_options::match_is_nightly_build(&matches), nightly_options::match_is_nightly_build(matches),
&diag, &diag,
&mut id_map, &mut id_map,
edition, edition,

View file

@ -85,7 +85,7 @@ crate struct DocContext<'tcx> {
impl<'tcx> DocContext<'tcx> { impl<'tcx> DocContext<'tcx> {
crate fn sess(&self) -> &'tcx Session { crate fn sess(&self) -> &'tcx Session {
&self.tcx.sess self.tcx.sess
} }
crate fn with_param_env<T, F: FnOnce(&mut Self) -> T>(&mut self, def_id: DefId, f: F) -> T { crate fn with_param_env<T, F: FnOnce(&mut Self) -> T>(&mut self, def_id: DefId, f: F) -> T {
@ -464,7 +464,7 @@ crate fn run_global_ctxt(
_ => continue, _ => continue,
}; };
for name in value.as_str().split_whitespace() { for name in value.as_str().split_whitespace() {
let span = attr.name_value_literal_span().unwrap_or(attr.span()); let span = attr.name_value_literal_span().unwrap_or_else(|| attr.span());
manual_passes.extend(parse_pass(name, Some(span))); manual_passes.extend(parse_pass(name, Some(span)));
} }
} }

View file

@ -73,7 +73,7 @@ crate fn run(options: Options) -> Result<(), ErrorReported> {
search_paths: options.libs.clone(), search_paths: options.libs.clone(),
crate_types, crate_types,
lint_opts: if !options.display_doctest_warnings { lint_opts } else { vec![] }, lint_opts: if !options.display_doctest_warnings { lint_opts } else { vec![] },
lint_cap: Some(options.lint_cap.unwrap_or_else(|| lint::Forbid)), lint_cap: Some(options.lint_cap.unwrap_or(lint::Forbid)),
cg: options.codegen_options.clone(), cg: options.codegen_options.clone(),
externs: options.externs.clone(), externs: options.externs.clone(),
unstable_features: options.render_options.unstable_features, unstable_features: options.render_options.unstable_features,
@ -176,7 +176,7 @@ crate fn run(options: Options) -> Result<(), ErrorReported> {
.iter() .iter()
.map(|uexts| uexts.unused_extern_names.iter().collect::<FxHashSet<&String>>()) .map(|uexts| uexts.unused_extern_names.iter().collect::<FxHashSet<&String>>())
.fold(extern_names, |uextsa, uextsb| { .fold(extern_names, |uextsa, uextsb| {
uextsa.intersection(&uextsb).map(|v| *v).collect::<FxHashSet<&String>>() uextsa.intersection(&uextsb).copied().collect::<FxHashSet<&String>>()
}) })
.iter() .iter()
.map(|v| (*v).clone()) .map(|v| (*v).clone())
@ -423,7 +423,7 @@ fn run_test(
// Add a \n to the end to properly terminate the last line, // Add a \n to the end to properly terminate the last line,
// but only if there was output to be printed // but only if there was output to be printed
if out_lines.len() > 0 { if !out_lines.is_empty() {
out_lines.push(""); out_lines.push("");
} }
@ -1124,7 +1124,7 @@ impl<'a, 'hir, 'tcx> HirCollector<'a, 'hir, 'tcx> {
let mut attrs = Attributes::from_ast(ast_attrs, None); let mut attrs = Attributes::from_ast(ast_attrs, None);
if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) { if let Some(ref cfg) = ast_attrs.cfg(self.tcx, &FxHashSet::default()) {
if !cfg.matches(&self.sess.parse_sess, Some(&self.sess.features_untracked())) { if !cfg.matches(&self.sess.parse_sess, Some(self.sess.features_untracked())) {
return; return;
} }
} }
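For reference, the `unused_extern_names` intersection and the `out_lines` check above boil down to two idioms: `.copied()` instead of `.map(|v| *v)` when the items are `Copy` references, and `!x.is_empty()` instead of `x.len() > 0`. A minimal sketch with invented crate names:

use std::collections::HashSet;

fn main() {
    let a: HashSet<&str> = ["serde", "log", "regex"].into_iter().collect();
    let b: HashSet<&str> = ["log", "regex", "rand"].into_iter().collect();

    // `.copied()` replaces the hand-written `.map(|v| *v)`; it only exists for
    // `Copy` items, so the intent is explicit.
    let common: HashSet<&str> = a.intersection(&b).copied().collect();

    // `!common.is_empty()` reads better than `common.len() > 0` (clippy's len_zero).
    if !common.is_empty() {
        println!("common: {:?}", common);
    }
}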

View file

@ -292,7 +292,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
// inserted later on when serializing the search-index. // inserted later on when serializing the search-index.
if item.def_id.index().map_or(false, |idx| idx != CRATE_DEF_INDEX) { if item.def_id.index().map_or(false, |idx| idx != CRATE_DEF_INDEX) {
let desc = item.doc_value().map_or_else(String::new, |x| { let desc = item.doc_value().map_or_else(String::new, |x| {
short_markdown_summary(&x.as_str(), &item.link_names(&self.cache)) short_markdown_summary(x.as_str(), &item.link_names(self.cache))
}); });
self.cache.search_index.push(IndexItem { self.cache.search_index.push(IndexItem {
ty: item.type_(), ty: item.type_(),
@ -462,7 +462,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
let impl_item = Impl { impl_item: item }; let impl_item = Impl { impl_item: item };
if impl_item.trait_did().map_or(true, |d| self.cache.traits.contains_key(&d)) { if impl_item.trait_did().map_or(true, |d| self.cache.traits.contains_key(&d)) {
for did in dids { for did in dids {
self.cache.impls.entry(did).or_insert(vec![]).push(impl_item.clone()); self.cache.impls.entry(did).or_insert_with(Vec::new).push(impl_item.clone());
} }
} else { } else {
let trait_did = impl_item.trait_did().expect("no trait did"); let trait_did = impl_item.trait_did().expect("no trait did");
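The cache change above swaps `or_insert(vec![])` for `or_insert_with(Vec::new)`: the argument of `or_insert` is evaluated on every call, while `or_insert_with` defers building the default until the entry is actually vacant. A toy version with made-up impl data:

use std::collections::HashMap;

fn main() {
    let impls = vec![(1u32, "impl Display"), (1, "impl Debug"), (2, "impl Clone")];

    let mut by_did: HashMap<u32, Vec<&str>> = HashMap::new();
    for (did, imp) in impls {
        // `Vec::new` is only called when the slot is vacant; `or_insert(vec![])`
        // would construct (and immediately drop) an empty Vec on every iteration.
        by_did.entry(did).or_insert_with(Vec::new).push(imp);
    }

    println!("{:?}", by_did);
}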

View file

@ -597,7 +597,7 @@ crate fn href_relative_parts<'a>(fqp: &'a [String], relative_to_fqp: &'a [String
/// Used when rendering a `ResolvedPath` structure. This invokes the `path` /// Used when rendering a `ResolvedPath` structure. This invokes the `path`
/// rendering function with the necessary arguments for linking to a local path. /// rendering function with the necessary arguments for linking to a local path.
fn resolved_path<'a, 'cx: 'a>( fn resolved_path<'cx>(
w: &mut fmt::Formatter<'_>, w: &mut fmt::Formatter<'_>,
did: DefId, did: DefId,
path: &clean::Path, path: &clean::Path,
@ -696,7 +696,7 @@ fn primitive_link(
/// Helper to render type parameters /// Helper to render type parameters
fn tybounds<'a, 'tcx: 'a>( fn tybounds<'a, 'tcx: 'a>(
bounds: &'a Vec<clean::PolyTrait>, bounds: &'a [clean::PolyTrait],
lt: &'a Option<clean::Lifetime>, lt: &'a Option<clean::Lifetime>,
cx: &'a Context<'tcx>, cx: &'a Context<'tcx>,
) -> impl fmt::Display + 'a + Captures<'tcx> { ) -> impl fmt::Display + 'a + Captures<'tcx> {
@ -886,7 +886,7 @@ fn fmt_type<'cx>(
if bounds.len() > 1 || trait_lt.is_some() => if bounds.len() > 1 || trait_lt.is_some() =>
{ {
write!(f, "{}{}{}(", amp, lt, m)?; write!(f, "{}{}{}(", amp, lt, m)?;
fmt_type(&ty, f, use_absolute, cx)?; fmt_type(ty, f, use_absolute, cx)?;
write!(f, ")") write!(f, ")")
} }
clean::Generic(..) => { clean::Generic(..) => {
@ -896,11 +896,11 @@ fn fmt_type<'cx>(
&format!("{}{}{}", amp, lt, m), &format!("{}{}{}", amp, lt, m),
cx, cx,
)?; )?;
fmt_type(&ty, f, use_absolute, cx) fmt_type(ty, f, use_absolute, cx)
} }
_ => { _ => {
write!(f, "{}{}{}", amp, lt, m)?; write!(f, "{}{}{}", amp, lt, m)?;
fmt_type(&ty, f, use_absolute, cx) fmt_type(ty, f, use_absolute, cx)
} }
} }
} }
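The `tybounds` signature change (`&'a Vec<clean::PolyTrait>` to `&'a [clean::PolyTrait]`) is the usual `&Vec<T>` to `&[T]` loosening: callers can still pass a `&Vec`, which coerces to a slice, and arrays or sub-slices now work too. A tiny sketch with a stand-in bounds formatter:

fn render_bounds(bounds: &[String]) -> String {
    // Taking `&[T]` instead of `&Vec<T>` accepts Vecs, arrays and slices alike
    // and drops one level of indirection (clippy's ptr_arg).
    bounds.join(" + ")
}

fn main() {
    let owned = vec!["Send".to_string(), "Sync".to_string()];
    println!("{}", render_bounds(&owned));                 // &Vec<String> coerces to &[String]
    println!("{}", render_bounds(&["Unpin".to_string()])); // so does a one-element array
}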

View file

@ -62,7 +62,7 @@ crate fn render_with_highlighting(
} }
write_header(out, class, extra_content); write_header(out, class, extra_content);
write_code(out, &src, edition, context_info, decoration_info); write_code(out, src, edition, context_info, decoration_info);
write_footer(out, playground_button); write_footer(out, playground_button);
} }
@ -718,7 +718,7 @@ fn string<T: Display>(
.map(|(url, _, _)| url) .map(|(url, _, _)| url)
} }
LinkFromSrc::Primitive(prim) => format::href_with_root_path( LinkFromSrc::Primitive(prim) => format::href_with_root_path(
PrimitiveType::primitive_locations(context.tcx())[&prim], PrimitiveType::primitive_locations(context.tcx())[prim],
context, context,
Some(context_info.root_path), Some(context_info.root_path),
) )

View file

@ -68,10 +68,8 @@ crate fn render<T: Print, S: Print>(
let krate_with_trailing_slash = ensure_trailing_slash(&layout.krate).to_string(); let krate_with_trailing_slash = ensure_trailing_slash(&layout.krate).to_string();
let style_files = style_files let style_files = style_files
.iter() .iter()
.filter_map(|t| { .filter_map(|t| t.path.file_stem().map(|stem| (stem, t.disabled)))
if let Some(stem) = t.path.file_stem() { Some((stem, t.disabled)) } else { None } .filter_map(|t| t.0.to_str().map(|path| (path, t.1)))
})
.filter_map(|t| if let Some(path) = t.0.to_str() { Some((path, t.1)) } else { None })
.map(|t| { .map(|t| {
format!( format!(
r#"<link rel="stylesheet" type="text/css" href="{}.css" {} {}>"#, r#"<link rel="stylesheet" type="text/css" href="{}.css" {} {}>"#,
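The stylesheet-link change above turns two `if let Some(x) = e { Some(..) } else { None }` closures into plain `map` calls inside `filter_map`. A self-contained sketch of the same shape; `StyleFile` and the link format are simplified stand-ins:

use std::path::PathBuf;

struct StyleFile {
    path: PathBuf,
    disabled: bool,
}

fn stylesheet_links(style_files: &[StyleFile]) -> Vec<String> {
    style_files
        .iter()
        // An `if let Some(x) = e { Some(..) } else { None }` closure is just
        // `e.map(..)`, so each step collapses into a one-line filter_map.
        .filter_map(|t| t.path.file_stem().map(|stem| (stem, t.disabled)))
        .filter_map(|(stem, disabled)| stem.to_str().map(|path| (path, disabled)))
        .map(|(path, disabled)| {
            format!(
                r#"<link rel="stylesheet" href="{}.css"{}>"#,
                path,
                if disabled { " disabled" } else { "" }
            )
        })
        .collect()
}

fn main() {
    let files = vec![
        StyleFile { path: PathBuf::from("light.css"), disabled: false },
        StyleFile { path: PathBuf::from("ayu.css"), disabled: true },
    ];
    for link in stylesheet_links(&files) {
        println!("{}", link);
    }
}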

View file

@ -178,7 +178,7 @@ fn map_line(s: &str) -> Line<'_> {
Line::Shown(Cow::Owned(s.replacen("##", "#", 1))) Line::Shown(Cow::Owned(s.replacen("##", "#", 1)))
} else if let Some(stripped) = trimmed.strip_prefix("# ") { } else if let Some(stripped) = trimmed.strip_prefix("# ") {
// # text // # text
Line::Hidden(&stripped) Line::Hidden(stripped)
} else if trimmed == "#" { } else if trimmed == "#" {
// We cannot handle '#text' because it could be #[attr]. // We cannot handle '#text' because it could be #[attr].
Line::Hidden("") Line::Hidden("")
@ -258,7 +258,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
let parse_result = match kind { let parse_result = match kind {
CodeBlockKind::Fenced(ref lang) => { CodeBlockKind::Fenced(ref lang) => {
let parse_result = let parse_result =
LangString::parse_without_check(&lang, self.check_error_codes, false); LangString::parse_without_check(lang, self.check_error_codes, false);
if !parse_result.rust { if !parse_result.rust {
return Some(Event::Html( return Some(Event::Html(
format!( format!(
@ -669,7 +669,7 @@ impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, I> {
loop { loop {
match self.inner.next() { match self.inner.next() {
Some((Event::FootnoteReference(ref reference), range)) => { Some((Event::FootnoteReference(ref reference), range)) => {
let entry = self.get_entry(&reference); let entry = self.get_entry(reference);
let reference = format!( let reference = format!(
"<sup id=\"fnref{0}\"><a href=\"#fn{0}\">{0}</a></sup>", "<sup id=\"fnref{0}\"><a href=\"#fn{0}\">{0}</a></sup>",
(*entry).1 (*entry).1
@ -904,7 +904,7 @@ impl LangString {
string string
.split(|c| c == ',' || c == ' ' || c == '\t') .split(|c| c == ',' || c == ' ' || c == '\t')
.map(str::trim) .map(str::trim)
.map(|token| if token.chars().next() == Some('.') { &token[1..] } else { token }) .map(|token| token.strip_prefix('.').unwrap_or(token))
.filter(|token| !token.is_empty()) .filter(|token| !token.is_empty())
} }
@ -974,7 +974,10 @@ impl LangString {
} }
x if extra.is_some() => { x if extra.is_some() => {
let s = x.to_lowercase(); let s = x.to_lowercase();
match if s == "compile-fail" || s == "compile_fail" || s == "compilefail" { if let Some((flag, help)) = if s == "compile-fail"
|| s == "compile_fail"
|| s == "compilefail"
{
Some(( Some((
"compile_fail", "compile_fail",
"the code block will either not be tested if not marked as a rust one \ "the code block will either not be tested if not marked as a rust one \
@ -1007,15 +1010,12 @@ impl LangString {
} else { } else {
None None
} { } {
Some((flag, help)) => { if let Some(extra) = extra {
if let Some(ref extra) = extra { extra.error_invalid_codeblock_attr(
extra.error_invalid_codeblock_attr( &format!("unknown attribute `{}`. Did you mean `{}`?", x, flag),
&format!("unknown attribute `{}`. Did you mean `{}`?", x, flag), help,
help, );
);
}
} }
None => {}
} }
seen_other_tags = true; seen_other_tags = true;
} }
@ -1051,13 +1051,10 @@ impl Markdown<'_> {
return String::new(); return String::new();
} }
let mut replacer = |broken_link: BrokenLink<'_>| { let mut replacer = |broken_link: BrokenLink<'_>| {
if let Some(link) = links
links.iter().find(|link| &*link.original_text == broken_link.reference) .iter()
{ .find(|link| &*link.original_text == broken_link.reference)
Some((link.href.as_str().into(), link.new_text.as_str().into())) .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
} else {
None
}
}; };
let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut replacer)); let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut replacer));
@ -1135,13 +1132,10 @@ impl MarkdownSummaryLine<'_> {
} }
let mut replacer = |broken_link: BrokenLink<'_>| { let mut replacer = |broken_link: BrokenLink<'_>| {
if let Some(link) = links
links.iter().find(|link| &*link.original_text == broken_link.reference) .iter()
{ .find(|link| &*link.original_text == broken_link.reference)
Some((link.href.as_str().into(), link.new_text.as_str().into())) .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
} else {
None
}
}; };
let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer)); let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));
@ -1172,13 +1166,10 @@ fn markdown_summary_with_limit(
} }
let mut replacer = |broken_link: BrokenLink<'_>| { let mut replacer = |broken_link: BrokenLink<'_>| {
if let Some(link) = link_names
link_names.iter().find(|link| &*link.original_text == broken_link.reference) .iter()
{ .find(|link| &*link.original_text == broken_link.reference)
Some((link.href.as_str().into(), link.new_text.as_str().into())) .map(|link| (link.href.as_str().into(), link.new_text.as_str().into()))
} else {
None
}
}; };
let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer)); let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer));
@ -1413,7 +1404,7 @@ crate fn rust_code_blocks(md: &str, extra_info: &ExtraInfo<'_>) -> Vec<RustCodeB
CodeBlockKind::Indented => { CodeBlockKind::Indented => {
// The ending of the offset goes too far sometime so we reduce it by one in // The ending of the offset goes too far sometime so we reduce it by one in
// these cases. // these cases.
if offset.end > offset.start && md.get(offset.end..=offset.end) == Some(&"\n") { if offset.end > offset.start && md.get(offset.end..=offset.end) == Some("\n") {
( (
LangString::default(), LangString::default(),
offset.start, offset.start,
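Among the markdown changes, the token filter swaps `token.chars().next() == Some('.')` plus manual slicing for a single `strip_prefix` call. A small sketch of that normalization with an invented token list:

fn normalize_token(token: &str) -> &str {
    // `strip_prefix` returns `Some(rest)` when the prefix matches, so the manual
    // "check the first char, then slice off one byte" dance collapses into one call.
    token.strip_prefix('.').unwrap_or(token)
}

fn main() {
    for token in ["rust", ".compile_fail", ".ignore", ""] {
        println!("{:?} -> {:?}", token, normalize_token(token));
    }
}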

View file

@ -1,3 +1,4 @@
use std::collections::hash_map::Entry;
use std::collections::BTreeMap; use std::collections::BTreeMap;
use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@ -36,7 +37,7 @@ crate fn build_index<'tcx>(krate: &clean::Crate, cache: &mut Cache, tcx: TyCtxt<
if let Some(&(ref fqp, _)) = cache.paths.get(&did) { if let Some(&(ref fqp, _)) = cache.paths.get(&did) {
let desc = item let desc = item
.doc_value() .doc_value()
.map_or_else(String::new, |s| short_markdown_summary(&s, &item.link_names(&cache))); .map_or_else(String::new, |s| short_markdown_summary(&s, &item.link_names(cache)));
cache.search_index.push(IndexItem { cache.search_index.push(IndexItem {
ty: item.type_(), ty: item.type_(),
name: item.name.unwrap().to_string(), name: item.name.unwrap().to_string(),
@ -44,7 +45,7 @@ crate fn build_index<'tcx>(krate: &clean::Crate, cache: &mut Cache, tcx: TyCtxt<
desc, desc,
parent: Some(did), parent: Some(did),
parent_idx: None, parent_idx: None,
search_type: get_index_search_type(&item, tcx), search_type: get_index_search_type(item, tcx),
aliases: item.attrs.get_doc_aliases(), aliases: item.attrs.get_doc_aliases(),
}); });
} }
@ -53,7 +54,7 @@ crate fn build_index<'tcx>(krate: &clean::Crate, cache: &mut Cache, tcx: TyCtxt<
let crate_doc = krate let crate_doc = krate
.module .module
.doc_value() .doc_value()
.map_or_else(String::new, |s| short_markdown_summary(&s, &krate.module.link_names(&cache))); .map_or_else(String::new, |s| short_markdown_summary(&s, &krate.module.link_names(cache)));
let Cache { ref mut search_index, ref paths, .. } = *cache; let Cache { ref mut search_index, ref paths, .. } = *cache;
@ -72,7 +73,7 @@ crate fn build_index<'tcx>(krate: &clean::Crate, cache: &mut Cache, tcx: TyCtxt<
// Set up alias indexes. // Set up alias indexes.
for (i, item) in search_index.iter().enumerate() { for (i, item) in search_index.iter().enumerate() {
for alias in &item.aliases[..] { for alias in &item.aliases[..] {
aliases.entry(alias.to_lowercase()).or_insert(Vec::new()).push(i); aliases.entry(alias.to_lowercase()).or_insert_with(Vec::new).push(i);
} }
} }
@ -82,12 +83,11 @@ crate fn build_index<'tcx>(krate: &clean::Crate, cache: &mut Cache, tcx: TyCtxt<
let mut lastpathid = 0usize; let mut lastpathid = 0usize;
for item in search_index { for item in search_index {
item.parent_idx = item.parent.and_then(|defid| { item.parent_idx = item.parent.and_then(|defid| match defid_to_pathid.entry(defid) {
if defid_to_pathid.contains_key(&defid) { Entry::Occupied(entry) => Some(*entry.get()),
defid_to_pathid.get(&defid).copied() Entry::Vacant(entry) => {
} else {
let pathid = lastpathid; let pathid = lastpathid;
defid_to_pathid.insert(defid, pathid); entry.insert(pathid);
lastpathid += 1; lastpathid += 1;
if let Some(&(ref fqp, short)) = paths.get(&defid) { if let Some(&(ref fqp, short)) = paths.get(&defid) {
@ -203,12 +203,12 @@ crate fn get_index_search_type<'tcx>(
let inputs = all_types let inputs = all_types
.iter() .iter()
.map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind))) .map(|(ty, kind)| TypeWithKind::from((get_index_type(ty), *kind)))
.filter(|a| a.ty.name.is_some()) .filter(|a| a.ty.name.is_some())
.collect(); .collect();
let output = ret_types let output = ret_types
.iter() .iter()
.map(|(ty, kind)| TypeWithKind::from((get_index_type(&ty), *kind))) .map(|(ty, kind)| TypeWithKind::from((get_index_type(ty), *kind)))
.filter(|a| a.ty.name.is_some()) .filter(|a| a.ty.name.is_some())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let output = if output.is_empty() { None } else { Some(output) }; let output = if output.is_empty() { None } else { Some(output) };
@ -296,7 +296,7 @@ crate fn get_real_types<'tcx>(
} }
let mut nb_added = 0; let mut nb_added = 0;
if let &Type::Generic(arg_s) = arg { if let Type::Generic(arg_s) = *arg {
if let Some(where_pred) = generics.where_predicates.iter().find(|g| match g { if let Some(where_pred) = generics.where_predicates.iter().find(|g| match g {
WherePredicate::BoundPredicate { ty, .. } => ty.def_id() == arg.def_id(), WherePredicate::BoundPredicate { ty, .. } => ty.def_id() == arg.def_id(),
_ => false, _ => false,
@ -374,7 +374,7 @@ crate fn get_all_types<'tcx>(
let ret_types = match decl.output { let ret_types = match decl.output {
FnRetTy::Return(ref return_type) => { FnRetTy::Return(ref return_type) => {
let mut ret = FxHashSet::default(); let mut ret = FxHashSet::default();
get_real_types(generics, &return_type, tcx, 0, &mut ret); get_real_types(generics, return_type, tcx, 0, &mut ret);
if ret.is_empty() { if ret.is_empty() {
if let Some(kind) = return_type.def_id().map(|did| tcx.def_kind(did).into()) { if let Some(kind) = return_type.def_id().map(|did| tcx.def_kind(did).into()) {
ret.insert((return_type.clone(), kind)); ret.insert((return_type.clone(), kind));
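The new `use std::collections::hash_map::Entry;` import above supports the rewrite of the `defid_to_pathid` lookup: instead of `contains_key` followed by `get`/`insert`, a single `entry` call hashes the key once and handles both the occupied and vacant cases. A standalone sketch of the interning pattern with plain integer ids:

use std::collections::hash_map::Entry;
use std::collections::HashMap;

/// Returns the existing index for `def_id`, or assigns the next free one.
fn path_index(
    def_id: u64,
    defid_to_pathid: &mut HashMap<u64, usize>,
    lastpathid: &mut usize,
) -> usize {
    // One `entry` lookup replaces the `contains_key` + `get`/`insert` pair
    // (clippy's map_entry lint), so the key is hashed only once.
    match defid_to_pathid.entry(def_id) {
        Entry::Occupied(entry) => *entry.get(),
        Entry::Vacant(entry) => {
            let pathid = *lastpathid;
            entry.insert(pathid);
            *lastpathid += 1;
            pathid
        }
    }
}

fn main() {
    let mut map = HashMap::new();
    let mut next = 0usize;
    println!("{}", path_index(10, &mut map, &mut next)); // 0
    println!("{}", path_index(20, &mut map, &mut next)); // 1
    println!("{}", path_index(10, &mut map, &mut next)); // 0 again
}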

View file

@ -160,7 +160,7 @@ impl<'tcx> Context<'tcx> {
} }
pub(super) fn sess(&self) -> &'tcx Session { pub(super) fn sess(&self) -> &'tcx Session {
&self.shared.tcx.sess self.shared.tcx.sess
} }
pub(super) fn derive_id(&self, id: String) -> String { pub(super) fn derive_id(&self, id: String) -> String {
@ -188,7 +188,7 @@ impl<'tcx> Context<'tcx> {
}; };
title.push_str(" - Rust"); title.push_str(" - Rust");
let tyname = it.type_(); let tyname = it.type_();
let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(&doc)); let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(doc));
let desc = if let Some(desc) = desc { let desc = if let Some(desc) = desc {
desc desc
} else if it.is_crate() { } else if it.is_crate() {

View file

@ -126,8 +126,8 @@ impl Serialize for IndexItemFunctionType {
// If we couldn't figure out a type, just write `null`. // If we couldn't figure out a type, just write `null`.
let mut iter = self.inputs.iter(); let mut iter = self.inputs.iter();
if match self.output { if match self.output {
Some(ref output) => iter.chain(output.iter()).any(|ref i| i.ty.name.is_none()), Some(ref output) => iter.chain(output.iter()).any(|i| i.ty.name.is_none()),
None => iter.any(|ref i| i.ty.name.is_none()), None => iter.any(|i| i.ty.name.is_none()),
} { } {
serializer.serialize_none() serializer.serialize_none()
} else { } else {
@ -906,7 +906,7 @@ fn render_assoc_item(
AssocItemLink::GotoSource(did, provided_methods) => { AssocItemLink::GotoSource(did, provided_methods) => {
// We're creating a link from an impl-item to the corresponding // We're creating a link from an impl-item to the corresponding
// trait-item and need to map the anchored type accordingly. // trait-item and need to map the anchored type accordingly.
let ty = if provided_methods.contains(&name) { let ty = if provided_methods.contains(name) {
ItemType::Method ItemType::Method
} else { } else {
ItemType::TyMethod ItemType::TyMethod
@ -965,7 +965,7 @@ fn render_assoc_item(
name = name, name = name,
generics = g.print(cx), generics = g.print(cx),
decl = d.full_print(header_len, indent, header.asyncness, cx), decl = d.full_print(header_len, indent, header.asyncness, cx),
notable_traits = notable_traits_decl(&d, cx), notable_traits = notable_traits_decl(d, cx),
where_clause = print_where_clause(g, cx, indent, end_newline), where_clause = print_where_clause(g, cx, indent, end_newline),
) )
} }
@ -1008,7 +1008,7 @@ fn attributes(it: &clean::Item) -> Vec<String> {
.iter() .iter()
.filter_map(|attr| { .filter_map(|attr| {
if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) { if ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
Some(pprust::attribute_to_string(&attr).replace("\n", "").replace(" ", " ")) Some(pprust::attribute_to_string(attr).replace("\n", "").replace(" ", " "))
} else { } else {
None None
} }
@ -1041,7 +1041,7 @@ enum AssocItemLink<'a> {
impl<'a> AssocItemLink<'a> { impl<'a> AssocItemLink<'a> {
fn anchor(&self, id: &'a str) -> Self { fn anchor(&self, id: &'a str) -> Self {
match *self { match *self {
AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(&id)), AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(id)),
ref other => *other, ref other => *other,
} }
} }
@ -1120,7 +1120,7 @@ fn render_assoc_items(
let (blanket_impl, concrete): (Vec<&&Impl>, _) = let (blanket_impl, concrete): (Vec<&&Impl>, _) =
concrete.into_iter().partition(|t| t.inner_impl().blanket_impl.is_some()); concrete.into_iter().partition(|t| t.inner_impl().blanket_impl.is_some());
let mut impls = Buffer::empty_from(&w); let mut impls = Buffer::empty_from(w);
render_impls(cx, &mut impls, &concrete, containing_item); render_impls(cx, &mut impls, &concrete, containing_item);
let impls = impls.into_inner(); let impls = impls.into_inner();
if !impls.is_empty() { if !impls.is_empty() {
@ -1333,7 +1333,7 @@ fn render_impl(
&& match render_mode { && match render_mode {
RenderMode::Normal => true, RenderMode::Normal => true,
RenderMode::ForDeref { mut_: deref_mut_ } => { RenderMode::ForDeref { mut_: deref_mut_ } => {
should_render_item(&item, deref_mut_, cx.tcx()) should_render_item(item, deref_mut_, cx.tcx())
} }
}; };
@ -1566,7 +1566,7 @@ fn render_impl(
&mut impl_items, &mut impl_items,
cx, cx,
&t.trait_, &t.trait_,
&i.inner_impl(), i.inner_impl(),
&i.impl_item, &i.impl_item,
parent, parent,
render_mode, render_mode,
@ -2060,7 +2060,7 @@ fn sidebar_assoc_items(cx: &Context<'_>, out: &mut Buffer, it: &clean::Item) {
} }
} }
fn sidebar_deref_methods(cx: &Context<'_>, out: &mut Buffer, impl_: &Impl, v: &Vec<Impl>) { fn sidebar_deref_methods(cx: &Context<'_>, out: &mut Buffer, impl_: &Impl, v: &[Impl]) {
let c = cx.cache(); let c = cx.cache();
debug!("found Deref: {:?}", impl_); debug!("found Deref: {:?}", impl_);
@ -2159,16 +2159,14 @@ fn get_id_for_impl_on_foreign_type(
fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String, String)> { fn extract_for_impl_name(item: &clean::Item, cx: &Context<'_>) -> Option<(String, String)> {
match *item.kind { match *item.kind {
clean::ItemKind::ImplItem(ref i) => { clean::ItemKind::ImplItem(ref i) => {
if let Some(ref trait_) = i.trait_ { i.trait_.as_ref().map(|trait_| {
// Alternative format produces no URLs, // Alternative format produces no URLs,
// so this parameter does nothing. // so this parameter does nothing.
Some(( (
format!("{:#}", i.for_.print(cx)), format!("{:#}", i.for_.print(cx)),
get_id_for_impl_on_foreign_type(&i.for_, trait_, cx), get_id_for_impl_on_foreign_type(&i.for_, trait_, cx),
)) )
} else { })
None
}
} }
_ => None, _ => None,
} }
@ -2343,9 +2341,10 @@ fn sidebar_enum(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, e: &clean:
let mut variants = e let mut variants = e
.variants .variants
.iter() .iter()
.filter_map(|v| match v.name { .filter_map(|v| {
Some(ref name) => Some(format!("<a href=\"#variant.{name}\">{name}</a>", name = name)), v.name
_ => None, .as_ref()
.map(|name| format!("<a href=\"#variant.{name}\">{name}</a>", name = name))
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if !variants.is_empty() { if !variants.is_empty() {

View file

@ -34,10 +34,10 @@ use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine};
use serde::Serialize; use serde::Serialize;
const ITEM_TABLE_OPEN: &'static str = "<div class=\"item-table\">"; const ITEM_TABLE_OPEN: &str = "<div class=\"item-table\">";
const ITEM_TABLE_CLOSE: &'static str = "</div>"; const ITEM_TABLE_CLOSE: &str = "</div>";
const ITEM_TABLE_ROW_OPEN: &'static str = "<div class=\"item-row\">"; const ITEM_TABLE_ROW_OPEN: &str = "<div class=\"item-row\">";
const ITEM_TABLE_ROW_CLOSE: &'static str = "</div>"; const ITEM_TABLE_ROW_CLOSE: &str = "</div>";
// A component in a `use` path, like `string` in std::string::ToString // A component in a `use` path, like `string` in std::string::ToString
#[derive(Serialize)] #[derive(Serialize)]
@ -761,7 +761,7 @@ fn item_trait(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Tra
render_impl( render_impl(
w, w,
cx, cx,
&implementor, implementor,
it, it,
assoc_link, assoc_link,
RenderMode::Normal, RenderMode::Normal,
@ -1497,7 +1497,7 @@ fn render_union(
); );
if let Some(g) = g { if let Some(g) = g {
write!(w, "{}", g.print(cx)); write!(w, "{}", g.print(cx));
write!(w, "{}", print_where_clause(&g, cx, 0, true)); write!(w, "{}", print_where_clause(g, cx, 0, true));
} }
write!(w, " {{\n{}", tab); write!(w, " {{\n{}", tab);

View file

@ -105,7 +105,7 @@ impl Visitor<'tcx> for SpanMapVisitor<'tcx> {
} }
for bound in p.bounds { for bound in p.bounds {
if let Some(trait_ref) = bound.trait_ref() { if let Some(trait_ref) = bound.trait_ref() {
self.handle_path(&trait_ref.path, None); self.handle_path(trait_ref.path, None);
} }
} }
} }
@ -121,42 +121,33 @@ impl Visitor<'tcx> for SpanMapVisitor<'tcx> {
if !span.overlaps(m.inner) { if !span.overlaps(m.inner) {
// Now that we confirmed it's a file import, we want to get the span for the module // Now that we confirmed it's a file import, we want to get the span for the module
// name only and not all the "mod foo;". // name only and not all the "mod foo;".
if let Some(node) = self.tcx.hir().find(id) { if let Some(Node::Item(item)) = self.tcx.hir().find(id) {
match node { self.matches.insert(item.ident.span, LinkFromSrc::Local(clean::Span::new(m.inner)));
Node::Item(item) => {
self.matches
.insert(item.ident.span, LinkFromSrc::Local(clean::Span::new(m.inner)));
}
_ => {}
}
} }
} }
intravisit::walk_mod(self, m, id); intravisit::walk_mod(self, m, id);
} }
fn visit_expr(&mut self, expr: &'tcx rustc_hir::Expr<'tcx>) { fn visit_expr(&mut self, expr: &'tcx rustc_hir::Expr<'tcx>) {
match expr.kind { if let ExprKind::MethodCall(segment, method_span, _, _) = expr.kind {
ExprKind::MethodCall(segment, method_span, _, _) => { if let Some(hir_id) = segment.hir_id {
if let Some(hir_id) = segment.hir_id { let hir = self.tcx.hir();
let hir = self.tcx.hir(); let body_id = hir.enclosing_body_owner(hir_id);
let body_id = hir.enclosing_body_owner(hir_id); let typeck_results = self.tcx.sess.with_disabled_diagnostic(|| {
let typeck_results = self.tcx.sess.with_disabled_diagnostic(|| { self.tcx.typeck_body(
self.tcx.typeck_body( hir.maybe_body_owned_by(body_id).expect("a body which isn't a body"),
hir.maybe_body_owned_by(body_id).expect("a body which isn't a body"), )
) });
}); if let Some(def_id) = typeck_results.type_dependent_def_id(expr.hir_id) {
if let Some(def_id) = typeck_results.type_dependent_def_id(expr.hir_id) { self.matches.insert(
self.matches.insert( method_span,
method_span, match hir.span_if_local(def_id) {
match hir.span_if_local(def_id) { Some(span) => LinkFromSrc::Local(clean::Span::new(span)),
Some(span) => LinkFromSrc::Local(clean::Span::new(span)), None => LinkFromSrc::External(def_id),
None => LinkFromSrc::External(def_id), },
}, );
);
}
} }
} }
_ => {}
} }
intravisit::walk_expr(self, expr); intravisit::walk_expr(self, expr);
} }

View file

@ -128,7 +128,7 @@ impl Context<'_> {
) -> Result<(), Error> { ) -> Result<(), Error> {
if minify { if minify {
let contents = contents.as_ref(); let contents = contents.as_ref();
let contents = if resource.extension() == Some(&OsStr::new("css")) { let contents = if resource.extension() == Some(OsStr::new("css")) {
minifier::css::minify(contents).map_err(|e| { minifier::css::minify(contents).map_err(|e| {
Error::new(format!("failed to minify CSS file: {}", e), resource.path(self)) Error::new(format!("failed to minify CSS file: {}", e), resource.path(self))
})? })?

View file

@ -67,7 +67,7 @@ impl LocalSourcesCollector<'_, '_> {
} }
let mut href = String::new(); let mut href = String::new();
clean_path(&self.src_root, &p, false, |component| { clean_path(self.src_root, &p, false, |component| {
href.push_str(&component.to_string_lossy()); href.push_str(&component.to_string_lossy());
href.push('/'); href.push('/');
}); });
@ -168,7 +168,7 @@ impl SourceCollector<'_, 'tcx> {
}; };
// Remove the utf-8 BOM if any // Remove the utf-8 BOM if any
let contents = if contents.starts_with('\u{feff}') { &contents[3..] } else { &contents }; let contents = contents.strip_prefix('\u{feff}').unwrap_or(&contents);
// Create the intermediate directories // Create the intermediate directories
let mut cur = self.dst.clone(); let mut cur = self.dst.clone();
@ -209,7 +209,7 @@ impl SourceCollector<'_, 'tcx> {
contents, contents,
self.cx.shared.edition(), self.cx.shared.edition(),
file_span, file_span,
&self.cx, self.cx,
&root_path, &root_path,
None, None,
SourceContext::Standalone, SourceContext::Standalone,

View file

@ -412,7 +412,7 @@ impl FromWithTcx<clean::Type> for Type {
.map(|t| { .map(|t| {
clean::GenericBound::TraitBound(t, rustc_hir::TraitBoundModifier::None) clean::GenericBound::TraitBound(t, rustc_hir::TraitBoundModifier::None)
}) })
.chain(lt.into_iter().map(|lt| clean::GenericBound::Outlives(lt))) .chain(lt.into_iter().map(clean::GenericBound::Outlives))
.map(|bound| bound.into_tcx(tcx)) .map(|bound| bound.into_tcx(tcx))
.collect(), .collect(),
} }
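The JSON conversion change passes the enum-variant constructor directly to `map` instead of wrapping it in a closure; tuple-struct and tuple-variant constructors are already functions of their fields. A minimal sketch with a stand-in `GenericBound`:

#[derive(Debug)]
enum GenericBound {
    Outlives(String),
}

fn main() {
    let lifetimes = vec!["'a".to_string(), "'static".to_string()];
    // `GenericBound::Outlives` is itself a `fn(String) -> GenericBound`, so the
    // closure `|lt| GenericBound::Outlives(lt)` is redundant (clippy's redundant_closure).
    let bounds: Vec<GenericBound> = lifetimes.into_iter().map(GenericBound::Outlives).collect();
    println!("{:?}", bounds);
}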

View file

@ -775,7 +775,7 @@ fn main_options(options: config::Options) -> MainResult {
// We need to hold on to the complete resolver, so we cause everything to be // We need to hold on to the complete resolver, so we cause everything to be
// cloned for the analysis passes to use. Suboptimal, but necessary in the // cloned for the analysis passes to use. Suboptimal, but necessary in the
// current architecture. // current architecture.
let resolver = core::create_resolver(queries, &sess); let resolver = core::create_resolver(queries, sess);
if sess.has_errors() { if sess.has_errors() {
sess.fatal("Compilation failed, aborting rustdoc"); sess.fatal("Compilation failed, aborting rustdoc");

View file

@ -39,7 +39,7 @@ impl<'a, 'tcx> BareUrlsLinter<'a, 'tcx> {
) { ) {
trace!("looking for raw urls in {}", text); trace!("looking for raw urls in {}", text);
// For now, we only check "full" URLs (meaning, starting with "http://" or "https://"). // For now, we only check "full" URLs (meaning, starting with "http://" or "https://").
for match_ in URL_REGEX.find_iter(&text) { for match_ in URL_REGEX.find_iter(text) {
let url = match_.as_str(); let url = match_.as_str();
let url_range = match_.range(); let url_range = match_.range();
f( f(

View file

@ -36,7 +36,7 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
let source = dox[code_block.code].to_owned(); let source = dox[code_block.code].to_owned();
let sess = ParseSess::with_span_handler(handler, sm); let sess = ParseSess::with_span_handler(handler, sm);
let edition = code_block.lang_string.edition.unwrap_or(self.cx.tcx.sess.edition()); let edition = code_block.lang_string.edition.unwrap_or_else(|| self.cx.tcx.sess.edition());
let expn_data = ExpnData::default( let expn_data = ExpnData::default(
ExpnKind::AstPass(AstPass::TestHarness), ExpnKind::AstPass(AstPass::TestHarness),
DUMMY_SP, DUMMY_SP,
@ -77,7 +77,7 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
// The span and whether it is precise or not. // The span and whether it is precise or not.
let (sp, precise_span) = match super::source_span_for_markdown_range( let (sp, precise_span) = match super::source_span_for_markdown_range(
self.cx.tcx, self.cx.tcx,
&dox, dox,
&code_block.range, &code_block.range,
&item.attrs, &item.attrs,
) { ) {
@ -123,7 +123,7 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
// FIXME(#67563): Provide more context for these errors by displaying the spans inline. // FIXME(#67563): Provide more context for these errors by displaying the spans inline.
for message in buffer.messages.iter() { for message in buffer.messages.iter() {
diag.note(&message); diag.note(message);
} }
diag.emit(); diag.emit();
@ -150,8 +150,8 @@ impl<'a, 'tcx> DocFolder for SyntaxChecker<'a, 'tcx> {
item.def_id.expect_def_id(), item.def_id.expect_def_id(),
sp, sp,
); );
for code_block in markdown::rust_code_blocks(&dox, &extra) { for code_block in markdown::rust_code_blocks(dox, &extra) {
self.check_rust_syntax(&item, &dox, code_block); self.check_rust_syntax(&item, dox, code_block);
} }
} }

View file

@ -115,10 +115,10 @@ crate fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) {
let mut tests = Tests { found_tests: 0 }; let mut tests = Tests { found_tests: 0 };
find_testable_code(&dox, &mut tests, ErrorCodes::No, false, None); find_testable_code(dox, &mut tests, ErrorCodes::No, false, None);
if tests.found_tests == 0 && cx.tcx.sess.is_nightly_build() { if tests.found_tests == 0 && cx.tcx.sess.is_nightly_build() {
if should_have_doc_example(cx, &item) { if should_have_doc_example(cx, item) {
debug!("reporting error for {:?} (hir_id={:?})", item, hir_id); debug!("reporting error for {:?} (hir_id={:?})", item, hir_id);
let sp = item.attr_span(cx.tcx); let sp = item.attr_span(cx.tcx);
cx.tcx.struct_span_lint_hir( cx.tcx.struct_span_lint_hir(

View file

@ -289,7 +289,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
) -> Result<(Res, Option<String>), ErrorKind<'path>> { ) -> Result<(Res, Option<String>), ErrorKind<'path>> {
let tcx = self.cx.tcx; let tcx = self.cx.tcx;
let no_res = || ResolutionFailure::NotResolved { let no_res = || ResolutionFailure::NotResolved {
module_id: module_id, module_id,
partial_res: None, partial_res: None,
unresolved: path_str.into(), unresolved: path_str.into(),
}; };
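
Dropping `module_id: module_id` in favour of the field init shorthand matches clippy's `redundant_field_names` lint. A minimal sketch, with a hypothetical struct:

    // Sketch of a `clippy::redundant_field_names` fix (hypothetical struct).
    struct NotResolved {
        module_id: u32,
        unresolved: String,
    }

    fn no_res(module_id: u32, path_str: &str) -> NotResolved {
        NotResolved {
            // Before: `module_id: module_id` repeats the name; the shorthand
            // below says the same thing.
            module_id,
            unresolved: path_str.into(),
        }
    }

    fn main() {
        let err = no_res(7, "foo::Bar");
        assert_eq!(err.module_id, 7);
        assert_eq!(err.unresolved, "foo::Bar");
    }
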
@ -437,7 +437,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
fn resolve_path(&self, path_str: &str, ns: Namespace, module_id: DefId) -> Option<Res> { fn resolve_path(&self, path_str: &str, ns: Namespace, module_id: DefId) -> Option<Res> {
let result = self.cx.enter_resolver(|resolver| { let result = self.cx.enter_resolver(|resolver| {
resolver resolver
.resolve_str_path_error(DUMMY_SP, &path_str, ns, module_id) .resolve_str_path_error(DUMMY_SP, path_str, ns, module_id)
.and_then(|(_, res)| res.try_into()) .and_then(|(_, res)| res.try_into())
}); });
debug!("{} resolved to {:?} in namespace {:?}", path_str, result, ns); debug!("{} resolved to {:?} in namespace {:?}", path_str, result, ns);
@ -543,7 +543,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
ty::Uint(uty) => Res::Primitive(uty.into()), ty::Uint(uty) => Res::Primitive(uty.into()),
ty::Float(fty) => Res::Primitive(fty.into()), ty::Float(fty) => Res::Primitive(fty.into()),
ty::Str => Res::Primitive(Str), ty::Str => Res::Primitive(Str),
ty::Tuple(ref tys) if tys.is_empty() => Res::Primitive(Unit), ty::Tuple(tys) if tys.is_empty() => Res::Primitive(Unit),
ty::Tuple(_) => Res::Primitive(Tuple), ty::Tuple(_) => Res::Primitive(Tuple),
ty::Array(..) => Res::Primitive(Array), ty::Array(..) => Res::Primitive(Array),
ty::Slice(_) => Res::Primitive(Slice), ty::Slice(_) => Res::Primitive(Slice),
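
The switch from `ty::Tuple(ref tys)` to `ty::Tuple(tys)` just above appears to drop a `ref` that match ergonomics already make redundant: when the scrutinee is a reference, a plain binding in the pattern is bound by reference automatically. A small sketch under that assumption, with a hypothetical enum:

    // Sketch of removing a redundant `ref` binding (hypothetical enum
    // standing in for the type kinds matched in rustdoc).
    enum Kind {
        Tuple(Vec<u32>),
        Other,
    }

    fn is_unit(kind: &Kind) -> bool {
        match kind {
            // Before: `Kind::Tuple(ref tys)`. The `ref` is redundant because
            // the scrutinee is `&Kind`, so `tys` is already bound as `&Vec<u32>`.
            Kind::Tuple(tys) => tys.is_empty(),
            Kind::Other => false,
        }
    }

    fn main() {
        assert!(is_unit(&Kind::Tuple(Vec::new())));
        assert!(!is_unit(&Kind::Tuple(vec![1])));
        assert!(!is_unit(&Kind::Other));
    }
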
@ -978,13 +978,13 @@ fn preprocess_link<'a>(
} }
// Parse and strip the disambiguator from the link, if present. // Parse and strip the disambiguator from the link, if present.
let (disambiguator, path_str, link_text) = match Disambiguator::from_str(&link) { let (disambiguator, path_str, link_text) = match Disambiguator::from_str(link) {
Ok(Some((d, path, link_text))) => (Some(d), path.trim(), link_text.trim()), Ok(Some((d, path, link_text))) => (Some(d), path.trim(), link_text.trim()),
Ok(None) => (None, link.trim(), link.trim()), Ok(None) => (None, link.trim(), link.trim()),
Err((err_msg, relative_range)) => { Err((err_msg, relative_range)) => {
// Only report error if we would not have ignored this link. See issue #83859. // Only report error if we would not have ignored this link. See issue #83859.
if !should_ignore_link_with_disambiguators(link) { if !should_ignore_link_with_disambiguators(link) {
let no_backticks_range = range_between_backticks(&ori_link); let no_backticks_range = range_between_backticks(ori_link);
let disambiguator_range = (no_backticks_range.start + relative_range.start) let disambiguator_range = (no_backticks_range.start + relative_range.start)
..(no_backticks_range.start + relative_range.end); ..(no_backticks_range.start + relative_range.end);
return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg))); return Some(Err(PreprocessingError::Disambiguator(disambiguator_range, err_msg)));
@ -1000,7 +1000,7 @@ fn preprocess_link<'a>(
// Strip generics from the path. // Strip generics from the path.
let path_str = if path_str.contains(['<', '>'].as_slice()) { let path_str = if path_str.contains(['<', '>'].as_slice()) {
match strip_generics_from_path(&path_str) { match strip_generics_from_path(path_str) {
Ok(path) => path, Ok(path) => path,
Err(err_kind) => { Err(err_kind) => {
debug!("link has malformed generics: {}", path_str); debug!("link has malformed generics: {}", path_str);
@ -1228,7 +1228,7 @@ impl LinkCollector<'_, '_> {
if self.cx.tcx.privacy_access_levels(()).is_exported(src_id) if self.cx.tcx.privacy_access_levels(()).is_exported(src_id)
&& !self.cx.tcx.privacy_access_levels(()).is_exported(dst_id) && !self.cx.tcx.privacy_access_levels(()).is_exported(dst_id)
{ {
privacy_error(self.cx, &diag_info, &path_str); privacy_error(self.cx, &diag_info, path_str);
} }
} }
@ -1766,8 +1766,8 @@ fn report_diagnostic(
let span = let span =
super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| { super::source_span_for_markdown_range(tcx, dox, link_range, &item.attrs).map(|sp| {
if dox.bytes().nth(link_range.start) == Some(b'`') if dox.as_bytes().get(link_range.start) == Some(&b'`')
&& dox.bytes().nth(link_range.end - 1) == Some(b'`') && dox.as_bytes().get(link_range.end - 1) == Some(&b'`')
{ {
sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1)) sp.with_lo(sp.lo() + BytePos(1)).with_hi(sp.hi() - BytePos(1))
} else { } else {
@ -1868,8 +1868,7 @@ fn resolution_failure(
}; };
name = start; name = start;
for ns in [TypeNS, ValueNS, MacroNS] { for ns in [TypeNS, ValueNS, MacroNS] {
if let Some(res) = if let Some(res) = collector.check_full_res(ns, start, module_id, &None)
collector.check_full_res(ns, &start, module_id, &None)
{ {
debug!("found partial_res={:?}", res); debug!("found partial_res={:?}", res);
*partial_res = Some(res); *partial_res = Some(res);
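
Earlier in this file, `dox.bytes().nth(i)` becomes `dox.as_bytes().get(i)`, which looks like clippy's `bytes_nth` suggestion: indexing the byte slice avoids walking an iterator up to position `i`, and `get` returns `Option<&u8>`, hence the comparison against `Some(&b'`')`. A small sketch of the pattern, with a hypothetical helper:

    // Sketch of replacing `.bytes().nth(i)` with `.as_bytes().get(i)`
    // (hypothetical helper, not the rustdoc function itself).
    fn wrapped_in_backticks(dox: &str, start: usize, end: usize) -> bool {
        let bytes = dox.as_bytes();
        // Before: `dox.bytes().nth(start)` walks the iterator and yields
        // `Option<u8>`.
        // After: index the byte slice directly; `get` yields `Option<&u8>`.
        end > start && bytes.get(start) == Some(&b'`') && bytes.get(end - 1) == Some(&b'`')
    }

    fn main() {
        assert!(wrapped_in_backticks("`Vec`", 0, 5));
        assert!(!wrapped_in_backticks("Vec", 0, 3));
    }
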

View file

@ -34,7 +34,7 @@ impl IntraLinkCrateLoader {
let attrs = crate::clean::Attributes::from_ast(attrs, None); let attrs = crate::clean::Attributes::from_ast(attrs, None);
for (parent_module, doc) in attrs.collapsed_doc_value_by_module_level() { for (parent_module, doc) in attrs.collapsed_doc_value_by_module_level() {
debug!(?doc); debug!(?doc);
for link in markdown_links(&doc.as_str()) { for link in markdown_links(doc.as_str()) {
debug!(?link.link); debug!(?link.link);
let path_str = if let Some(Ok(x)) = preprocess_link(&link) { let path_str = if let Some(Ok(x)) = preprocess_link(&link) {
x.path_str x.path_str
@ -46,7 +46,7 @@ impl IntraLinkCrateLoader {
span, span,
&path_str, &path_str,
TypeNS, TypeNS,
parent_module.unwrap_or(self.current_mod.to_def_id()), parent_module.unwrap_or_else(|| self.current_mod.to_def_id()),
); );
}); });
} }

View file

@ -9,7 +9,6 @@ use rustc_hir::Node;
use rustc_hir::CRATE_HIR_ID; use rustc_hir::CRATE_HIR_ID;
use rustc_middle::middle::privacy::AccessLevel; use rustc_middle::middle::privacy::AccessLevel;
use rustc_middle::ty::TyCtxt; use rustc_middle::ty::TyCtxt;
use rustc_span;
use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE}; use rustc_span::def_id::{CRATE_DEF_ID, LOCAL_CRATE};
use rustc_span::source_map::Spanned; use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Symbol}; use rustc_span::symbol::{kw, sym, Symbol};
@ -277,7 +276,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
_ if self.inlining && !is_pub => {} _ if self.inlining && !is_pub => {}
hir::ItemKind::GlobalAsm(..) => {} hir::ItemKind::GlobalAsm(..) => {}
hir::ItemKind::Use(_, hir::UseKind::ListStem) => {} hir::ItemKind::Use(_, hir::UseKind::ListStem) => {}
hir::ItemKind::Use(ref path, kind) => { hir::ItemKind::Use(path, kind) => {
let is_glob = kind == hir::UseKind::Glob; let is_glob = kind == hir::UseKind::Glob;
// Struct and variant constructors and proc macro stubs always show up alongside // Struct and variant constructors and proc macro stubs always show up alongside
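
The removed `use rustc_span;` matches clippy's `single_component_path_imports` lint: importing a bare crate name is redundant when the crate is already usable by path, and the more specific `rustc_span::...` imports below it stay. The `ref path` removal is the same match-ergonomics cleanup noted earlier. A minimal sketch of the import lint, using `core` as a stand-in:

    // Sketch of a `clippy::single_component_path_imports` cleanup
    // (using `core` as a stand-in for `rustc_span`).

    // Before: a bare single-segment import is redundant on the 2018+ editions.
    // use core;

    // After: keep only the imports that actually shorten paths.
    use core::num::NonZeroU32;

    fn main() {
        let n = NonZeroU32::new(7).expect("non-zero");
        println!("{}", n);
    }
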