Auto merge of #95697 - klensy:no-strings, r=petrochenkov
refactor: simplify a few string-related interactions

A few small optimizations:

- check_doc_keyword: don't allocate a String for the emptiness check
- check_doc_alias_value: take the argument as a Symbol to avoid needless string conversions
- check_doc_attrs: don't allocate a Vec, iterate over the slice; replace the as_str() check with a Symbol comparison
- get_single_str_from_tts: don't preallocate a String
- trivial String-to-str replacements
- LifetimeScopeForPath::NonElided: use Vec<Symbol> instead of Vec<String>
- AssertModuleSource: use FxHashSet<Symbol> instead of BTreeSet<String>
- CrateInfo.crate_name: replace FxHashMap<CrateNum, String> with FxHashMap<CrateNum, Symbol>
commit 8c1fb2eb23
15 changed files with 56 additions and 65 deletions
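The common thread in these changes: a `Symbol` is a small interned handle into a string table, so emptiness and equality checks become integer comparisons instead of `String` allocations. A minimal standalone sketch of the idea follows; the toy `Symbol`/`Interner` types below are illustrative stand-ins, not rustc_span's real API.

```rust
use std::collections::HashMap;

// Toy interned-string handle: a plain index, cheap to copy and compare.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Symbol(u32);

#[derive(Default)]
struct Interner {
    names: HashMap<String, Symbol>,
    strings: Vec<String>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> Symbol {
        if let Some(&sym) = self.names.get(s) {
            return sym;
        }
        let sym = Symbol(self.strings.len() as u32);
        self.strings.push(s.to_string());
        self.names.insert(s.to_string(), sym);
        sym
    }

    fn as_str(&self, sym: Symbol) -> &str {
        &self.strings[sym.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let empty = interner.intern("");    // stands in for kw::Empty
    let keyword = interner.intern("dyn");

    // Old style: allocate a String just to check emptiness.
    assert!(!interner.as_str(keyword).to_string().is_empty());
    // New style: compare interned handles, no allocation.
    assert_ne!(keyword, empty);
}
```

The same reasoning drives the `kw::Empty` comparisons below: checking a pre-interned empty symbol replaces `is_empty()` on a freshly allocated `String`.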
@@ -13,7 +13,7 @@ pub fn expand_compile_error<'cx>(
         return DummyResult::any(sp);
     };
 
-    cx.span_err(sp, &var);
+    cx.span_err(sp, var.as_str());
 
     DummyResult::any(sp)
 }
@@ -21,8 +21,8 @@ pub fn expand_option_env<'cx>(
     };
 
     let sp = cx.with_def_site_ctxt(sp);
-    let value = env::var(&var.as_str()).ok().as_deref().map(Symbol::intern);
-    cx.sess.parse_sess.env_depinfo.borrow_mut().insert((Symbol::intern(&var), value));
+    let value = env::var(var.as_str()).ok().as_deref().map(Symbol::intern);
+    cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value));
     let e = match value {
         None => {
             let lt = cx.lifetime(sp, Ident::new(kw::StaticLifetime, sp));
@@ -104,7 +104,7 @@ pub fn expand_include<'cx>(
         return DummyResult::any(sp);
     };
     // The file will be added to the code map by the parser
-    let file = match resolve_path(cx, file, sp) {
+    let file = match resolve_path(cx, file.as_str(), sp) {
         Ok(f) => f,
         Err(mut err) => {
             err.emit();
@@ -176,7 +176,7 @@ pub fn expand_include_str(
     let Some(file) = get_single_str_from_tts(cx, sp, tts, "include_str!") else {
         return DummyResult::any(sp);
     };
-    let file = match resolve_path(cx, file, sp) {
+    let file = match resolve_path(cx, file.as_str(), sp) {
         Ok(f) => f,
         Err(mut err) => {
             err.emit();
@@ -210,7 +210,7 @@ pub fn expand_include_bytes(
     let Some(file) = get_single_str_from_tts(cx, sp, tts, "include_bytes!") else {
         return DummyResult::any(sp);
     };
-    let file = match resolve_path(cx, file, sp) {
+    let file = match resolve_path(cx, file.as_str(), sp) {
         Ok(f) => f,
         Err(mut err) => {
             err.emit();
@@ -288,8 +288,8 @@ fn load_imported_symbols_for_jit(
         match data[cnum.as_usize() - 1] {
             Linkage::NotLinked | Linkage::IncludedFromDylib => {}
             Linkage::Static => {
-                let name = &crate_info.crate_name[&cnum];
-                let mut err = sess.struct_err(&format!("Can't load static lib {}", name.as_str()));
+                let name = crate_info.crate_name[&cnum];
+                let mut err = sess.struct_err(&format!("Can't load static lib {}", name));
                 err.note("rustc_codegen_cranelift can only load dylibs in JIT mode.");
                 err.emit();
             }
@@ -216,7 +216,7 @@ pub fn each_linked_rlib(
             Some(_) => {}
             None => return Err("could not find formats for rlibs".to_string()),
         }
-        let name = &info.crate_name[&cnum];
+        let name = info.crate_name[&cnum];
         let used_crate_source = &info.used_crate_source[&cnum];
         if let Some((path, _)) = &used_crate_source.rlib {
             f(cnum, &path);
@@ -467,7 +467,7 @@ fn link_staticlib<'a, B: ArchiveBuilder<'a>>(
     let mut all_native_libs = vec![];
 
     let res = each_linked_rlib(&codegen_results.crate_info, &mut |cnum, path| {
-        let name = &codegen_results.crate_info.crate_name[&cnum];
+        let name = codegen_results.crate_info.crate_name[&cnum];
         let native_libs = &codegen_results.crate_info.native_libraries[&cnum];
 
         // Here when we include the rlib into our staticlib we need to make a
@@ -861,7 +861,7 @@ impl CrateInfo {
         for &cnum in crates.iter() {
             info.native_libraries
                 .insert(cnum, tcx.native_libraries(cnum).iter().map(Into::into).collect());
-            info.crate_name.insert(cnum, tcx.crate_name(cnum).to_string());
+            info.crate_name.insert(cnum, tcx.crate_name(cnum));
             info.used_crate_source.insert(cnum, tcx.used_crate_source(cnum).clone());
             if tcx.is_compiler_builtins(cnum) {
                 info.compiler_builtins = Some(cnum);
@@ -146,7 +146,7 @@ pub struct CrateInfo {
     pub profiler_runtime: Option<CrateNum>,
     pub is_no_builtins: FxHashSet<CrateNum>,
     pub native_libraries: FxHashMap<CrateNum, Vec<NativeLib>>,
-    pub crate_name: FxHashMap<CrateNum, String>,
+    pub crate_name: FxHashMap<CrateNum, Symbol>,
     pub used_libraries: Vec<NativeLib>,
     pub used_crate_source: FxHashMap<CrateNum, Lrc<CrateSource>>,
     pub used_crates: Vec<CrateNum>,
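One knock-on effect visible in the hunks above: because `Symbol` is `Copy`, indexing `crate_name` now yields the value itself rather than a `&String` borrow, which is why the callers drop the leading `&`. A hedged standalone sketch with stand-in types (the real `CrateNum` and `Symbol` live inside rustc and are not reproduced here):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct CrateNum(u32);

// Stand-in for the interned Symbol handle from rustc_span.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Symbol(u32);

fn main() {
    let mut crate_name: HashMap<CrateNum, Symbol> = HashMap::new();
    crate_name.insert(CrateNum(0), Symbol(42));

    let cnum = CrateNum(0);
    // With String values this had to borrow: `let name = &crate_name[&cnum];`.
    // With a Copy value type, the lookup hands out the value directly.
    let name = crate_name[&cnum];
    println!("crate {cnum:?} is named {name:?}");
}
```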
@@ -1221,7 +1221,7 @@ pub fn get_single_str_from_tts(
     sp: Span,
     tts: TokenStream,
     name: &str,
-) -> Option<String> {
+) -> Option<Symbol> {
     let mut p = cx.new_parser_from_tts(tts);
     if p.token == token::Eof {
         cx.span_err(sp, &format!("{} takes 1 argument", name));
@@ -1233,7 +1233,7 @@ pub fn get_single_str_from_tts(
     if p.token != token::Eof {
         cx.span_err(sp, &format!("{} takes 1 argument", name));
     }
-    expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| s.to_string())
+    expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| s)
 }
 
 /// Extracts comma-separated expressions from `tts`.
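With `get_single_str_from_tts` returning `Option<Symbol>`, callers such as `expand_include_str` above convert to `&str` only at the call that actually needs one. A rough standalone sketch of that pattern; the `Symbol`, `get_single_str`, and `resolve_path` names here are simplified stand-ins, not the real rustc_expand API:

```rust
// Toy stand-in for an interned string handle.
#[derive(Clone, Copy, Debug)]
struct Symbol(&'static str);

impl Symbol {
    fn as_str(&self) -> &'static str {
        self.0
    }
}

// Before this change the helper returned Option<String>, forcing an
// allocation even for callers that never needed an owned value.
fn get_single_str() -> Option<Symbol> {
    Some(Symbol("foo.rs"))
}

fn resolve_path(path: &str) -> String {
    format!("src/{path}")
}

fn main() {
    if let Some(file) = get_single_str() {
        // Only the call that really wants a &str pays for the conversion.
        println!("{}", resolve_path(file.as_str()));
    }
}
```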
@@ -22,12 +22,12 @@
 //! was re-used.
 
 use rustc_ast as ast;
+use rustc_data_structures::stable_set::FxHashSet;
 use rustc_hir::def_id::LOCAL_CRATE;
 use rustc_middle::mir::mono::CodegenUnitNameBuilder;
 use rustc_middle::ty::TyCtxt;
 use rustc_session::cgu_reuse_tracker::*;
 use rustc_span::symbol::{sym, Symbol};
-use std::collections::BTreeSet;
 
 #[allow(missing_docs)]
 pub fn assert_module_sources(tcx: TyCtxt<'_>) {
@@ -36,12 +36,8 @@ pub fn assert_module_sources(tcx: TyCtxt<'_>) {
         return;
     }
 
-    let available_cgus = tcx
-        .collect_and_partition_mono_items(())
-        .1
-        .iter()
-        .map(|cgu| cgu.name().to_string())
-        .collect::<BTreeSet<String>>();
+    let available_cgus =
+        tcx.collect_and_partition_mono_items(()).1.iter().map(|cgu| cgu.name()).collect();
 
     let ams = AssertModuleSource { tcx, available_cgus };
 
@@ -53,7 +49,7 @@ pub fn assert_module_sources(tcx: TyCtxt<'_>) {
 
 struct AssertModuleSource<'tcx> {
     tcx: TyCtxt<'tcx>,
-    available_cgus: BTreeSet<String>,
+    available_cgus: FxHashSet<Symbol>,
 }
 
 impl<'tcx> AssertModuleSource<'tcx> {
@@ -124,18 +120,17 @@ impl<'tcx> AssertModuleSource<'tcx> {
 
         debug!("mapping '{}' to cgu name '{}'", self.field(attr, sym::module), cgu_name);
 
-        if !self.available_cgus.contains(cgu_name.as_str()) {
+        if !self.available_cgus.contains(&cgu_name) {
+            let mut cgu_names: Vec<&str> =
+                self.available_cgus.iter().map(|cgu| cgu.as_str()).collect();
+            cgu_names.sort();
             self.tcx.sess.span_err(
                 attr.span,
                 &format!(
                     "no module named `{}` (mangled: {}). Available modules: {}",
                     user_path,
                     cgu_name,
-                    self.available_cgus
-                        .iter()
-                        .map(|cgu| cgu.to_string())
-                        .collect::<Vec<_>>()
-                        .join(", ")
+                    cgu_names.join(", ")
                 ),
             );
         }
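Swapping `BTreeSet<String>` for `FxHashSet<Symbol>` gives cheaper inserts and lookups but loses the implicit ordering, which is why the error path above now collects and sorts the names before joining them. A standalone sketch of that trade-off, using `std::collections::HashSet` in place of `FxHashSet`:

```rust
use std::collections::HashSet;

fn main() {
    // Hash-set iteration order is unspecified, unlike the old BTreeSet<String>.
    let available_cgus: HashSet<&str> =
        ["crate.b", "crate.a", "crate.c"].into_iter().collect();

    let cgu_name = "crate.d";
    if !available_cgus.contains(cgu_name) {
        // Sort explicitly so the diagnostic output stays deterministic.
        let mut cgu_names: Vec<&str> = available_cgus.iter().copied().collect();
        cgu_names.sort();
        eprintln!(
            "no module named `{}`. Available modules: {}",
            cgu_name,
            cgu_names.join(", ")
        );
    }
}
```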
@@ -6,6 +6,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_hir::ItemLocalId;
 use rustc_macros::HashStable;
+use rustc_span::symbol::Symbol;
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, HashStable)]
 pub enum Region {
@@ -23,7 +24,7 @@ pub enum Region {
 pub enum LifetimeScopeForPath {
     // Contains all lifetime names that are in scope and could possibly be used in generics
     // arguments of path.
-    NonElided(Vec<String>),
+    NonElided(Vec<Symbol>),
 
     // Information that allows us to suggest args of the form `<'_>` in case
     // no generic arguments were provided for a path.
@@ -561,8 +561,7 @@ fn write_scope_tree(
         }
         indented_decl.push(';');
 
-        let local_name =
-            if local == RETURN_PLACE { " return place".to_string() } else { String::new() };
+        let local_name = if local == RETURN_PLACE { " return place" } else { "" };
 
         writeln!(
             w,
@@ -20,7 +20,7 @@ use rustc_session::lint::builtin::{
     CONFLICTING_REPR_HINTS, INVALID_DOC_ATTRIBUTES, UNUSED_ATTRIBUTES,
 };
 use rustc_session::parse::feature_err;
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::{kw, sym, Symbol};
 use rustc_span::{Span, DUMMY_SP};
 use std::collections::hash_map::Entry;
 
@@ -536,7 +536,7 @@ impl CheckAttrVisitor<'_> {
     fn check_doc_alias_value(
         &self,
         meta: &NestedMetaItem,
-        doc_alias: &str,
+        doc_alias: Symbol,
         hir_id: HirId,
         target: Target,
         is_list: bool,
@@ -554,14 +554,17 @@ impl CheckAttrVisitor<'_> {
             );
             false
         };
-        if doc_alias.is_empty() {
+        if doc_alias == kw::Empty {
             return err_fn(
                 meta.name_value_literal_span().unwrap_or_else(|| meta.span()),
                 "attribute cannot have empty value",
             );
         }
-        if let Some(c) =
-            doc_alias.chars().find(|&c| c == '"' || c == '\'' || (c.is_whitespace() && c != ' '))
+
+        let doc_alias_str = doc_alias.as_str();
+        if let Some(c) = doc_alias_str
+            .chars()
+            .find(|&c| c == '"' || c == '\'' || (c.is_whitespace() && c != ' '))
         {
             self.tcx.sess.span_err(
                 meta.name_value_literal_span().unwrap_or_else(|| meta.span()),
@@ -573,7 +576,7 @@ impl CheckAttrVisitor<'_> {
             );
             return false;
         }
-        if doc_alias.starts_with(' ') || doc_alias.ends_with(' ') {
+        if doc_alias_str.starts_with(' ') || doc_alias_str.ends_with(' ') {
             return err_fn(
                 meta.name_value_literal_span().unwrap_or_else(|| meta.span()),
                 "cannot start or end with ' '",
|
||||||
return err_fn(meta.span(), &format!("isn't allowed on {}", err));
|
return err_fn(meta.span(), &format!("isn't allowed on {}", err));
|
||||||
}
|
}
|
||||||
let item_name = self.tcx.hir().name(hir_id);
|
let item_name = self.tcx.hir().name(hir_id);
|
||||||
if item_name.as_str() == doc_alias {
|
if item_name == doc_alias {
|
||||||
return err_fn(meta.span(), "is the same as the item's name");
|
return err_fn(meta.span(), "is the same as the item's name");
|
||||||
}
|
}
|
||||||
let span = meta.span();
|
let span = meta.span();
|
||||||
if let Err(entry) = aliases.try_insert(doc_alias.to_owned(), span) {
|
if let Err(entry) = aliases.try_insert(doc_alias_str.to_owned(), span) {
|
||||||
self.tcx.struct_span_lint_hir(UNUSED_ATTRIBUTES, hir_id, span, |lint| {
|
self.tcx.struct_span_lint_hir(UNUSED_ATTRIBUTES, hir_id, span, |lint| {
|
||||||
lint.build("doc alias is duplicated")
|
lint.build("doc alias is duplicated")
|
||||||
.span_label(*entry.entry.get(), "first defined here")
|
.span_label(*entry.entry.get(), "first defined here")
|
||||||
|
@ -635,14 +638,7 @@ impl CheckAttrVisitor<'_> {
|
||||||
match v.literal() {
|
match v.literal() {
|
||||||
Some(l) => match l.kind {
|
Some(l) => match l.kind {
|
||||||
LitKind::Str(s, _) => {
|
LitKind::Str(s, _) => {
|
||||||
if !self.check_doc_alias_value(
|
if !self.check_doc_alias_value(v, s, hir_id, target, true, aliases) {
|
||||||
v,
|
|
||||||
s.as_str(),
|
|
||||||
hir_id,
|
|
||||||
target,
|
|
||||||
true,
|
|
||||||
aliases,
|
|
||||||
) {
|
|
||||||
errors += 1;
|
errors += 1;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -670,8 +666,8 @@ impl CheckAttrVisitor<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
errors == 0
|
errors == 0
|
||||||
} else if let Some(doc_alias) = meta.value_str().map(|s| s.to_string()) {
|
} else if let Some(doc_alias) = meta.value_str() {
|
||||||
self.check_doc_alias_value(meta, &doc_alias, hir_id, target, false, aliases)
|
self.check_doc_alias_value(meta, doc_alias, hir_id, target, false, aliases)
|
||||||
} else {
|
} else {
|
||||||
self.tcx
|
self.tcx
|
||||||
.sess
|
.sess
|
||||||
|
@ -686,8 +682,8 @@ impl CheckAttrVisitor<'_> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_doc_keyword(&self, meta: &NestedMetaItem, hir_id: HirId) -> bool {
|
fn check_doc_keyword(&self, meta: &NestedMetaItem, hir_id: HirId) -> bool {
|
||||||
let doc_keyword = meta.value_str().map(|s| s.to_string()).unwrap_or_else(String::new);
|
let doc_keyword = meta.value_str().unwrap_or(kw::Empty);
|
||||||
if doc_keyword.is_empty() {
|
if doc_keyword == kw::Empty {
|
||||||
self.doc_attr_str_error(meta, "keyword");
|
self.doc_attr_str_error(meta, "keyword");
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -718,7 +714,7 @@ impl CheckAttrVisitor<'_> {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !rustc_lexer::is_ident(&doc_keyword) {
|
if !rustc_lexer::is_ident(doc_keyword.as_str()) {
|
||||||
self.tcx
|
self.tcx
|
||||||
.sess
|
.sess
|
||||||
.struct_span_err(
|
.struct_span_err(
|
||||||
|
@ -911,20 +907,20 @@ impl CheckAttrVisitor<'_> {
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let mut is_valid = true;
|
let mut is_valid = true;
|
||||||
|
|
||||||
if let Some(list) = attr.meta().and_then(|mi| mi.meta_item_list().map(|l| l.to_vec())) {
|
if let Some(mi) = attr.meta() && let Some(list) = mi.meta_item_list() {
|
||||||
for meta in &list {
|
for meta in list {
|
||||||
if let Some(i_meta) = meta.meta_item() {
|
if let Some(i_meta) = meta.meta_item() {
|
||||||
match i_meta.name_or_empty() {
|
match i_meta.name_or_empty() {
|
||||||
sym::alias
|
sym::alias
|
||||||
if !self.check_attr_not_crate_level(&meta, hir_id, "alias")
|
if !self.check_attr_not_crate_level(meta, hir_id, "alias")
|
||||||
|| !self.check_doc_alias(&meta, hir_id, target, aliases) =>
|
|| !self.check_doc_alias(meta, hir_id, target, aliases) =>
|
||||||
{
|
{
|
||||||
is_valid = false
|
is_valid = false
|
||||||
}
|
}
|
||||||
|
|
||||||
sym::keyword
|
sym::keyword
|
||||||
if !self.check_attr_not_crate_level(&meta, hir_id, "keyword")
|
if !self.check_attr_not_crate_level(meta, hir_id, "keyword")
|
||||||
|| !self.check_doc_keyword(&meta, hir_id) =>
|
|| !self.check_doc_keyword(meta, hir_id) =>
|
||||||
{
|
{
|
||||||
is_valid = false
|
is_valid = false
|
||||||
}
|
}
|
||||||
|
@ -936,15 +932,15 @@ impl CheckAttrVisitor<'_> {
|
||||||
| sym::html_root_url
|
| sym::html_root_url
|
||||||
| sym::html_no_source
|
| sym::html_no_source
|
||||||
| sym::test
|
| sym::test
|
||||||
if !self.check_attr_crate_level(&attr, &meta, hir_id) =>
|
if !self.check_attr_crate_level(attr, meta, hir_id) =>
|
||||||
{
|
{
|
||||||
is_valid = false;
|
is_valid = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
sym::inline | sym::no_inline
|
sym::inline | sym::no_inline
|
||||||
if !self.check_doc_inline(
|
if !self.check_doc_inline(
|
||||||
&attr,
|
attr,
|
||||||
&meta,
|
meta,
|
||||||
hir_id,
|
hir_id,
|
||||||
target,
|
target,
|
||||||
specified_inline,
|
specified_inline,
|
||||||
|
@ -976,7 +972,7 @@ impl CheckAttrVisitor<'_> {
|
||||||
| sym::plugins => {}
|
| sym::plugins => {}
|
||||||
|
|
||||||
sym::test => {
|
sym::test => {
|
||||||
if !self.check_test_attr(&meta, hir_id) {
|
if !self.check_test_attr(meta, hir_id) {
|
||||||
is_valid = false;
|
is_valid = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
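The `check_doc_attrs` hunks above also stop cloning the meta-item list with `to_vec()` and iterate the borrowed list instead (rustc uses let-chains for that). A rough standalone sketch of the same borrow-instead-of-clone shape, written with nested `if let` to stay on stable Rust; the `MetaItem` type here is a simplified stand-in:

```rust
struct MetaItem {
    items: Option<Vec<String>>,
}

fn check_doc_attrs(attr: Option<&MetaItem>) -> bool {
    let mut is_valid = true;
    // Old shape: `attr.and_then(|mi| mi.items.clone())` allocated a fresh Vec
    // just so the loop below had something owned to walk.
    if let Some(mi) = attr {
        if let Some(list) = mi.items.as_deref() {
            for meta in list {
                // `meta` is borrowed straight from the attribute; nothing is cloned.
                if meta.is_empty() {
                    is_valid = false;
                }
            }
        }
    }
    is_valid
}

fn main() {
    let attr = MetaItem { items: Some(vec!["alias".to_string(), "keyword".to_string()]) };
    assert!(check_doc_attrs(Some(&attr)));
}
```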
@@ -696,7 +696,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'tcx> {
             hir::ItemKind::ExternCrate(_) => {
                 // compiler-generated `extern crate` items have a dummy span.
                 // `std` is still checked for the `restricted-std` feature.
-                if item.span.is_dummy() && item.ident.as_str() != "std" {
+                if item.span.is_dummy() && item.ident.name != sym::std {
                     return;
                 }
 
|
||||||
match scope {
|
match scope {
|
||||||
Scope::Binder { lifetimes, s, .. } => {
|
Scope::Binder { lifetimes, s, .. } => {
|
||||||
available_lifetimes.extend(lifetimes.keys().filter_map(|p| match p {
|
available_lifetimes.extend(lifetimes.keys().filter_map(|p| match p {
|
||||||
hir::ParamName::Plain(ident) => Some(ident.name.to_string()),
|
hir::ParamName::Plain(ident) => Some(ident.name),
|
||||||
_ => None,
|
_ => None,
|
||||||
}));
|
}));
|
||||||
scope = s;
|
scope = s;
|
||||||
|
|
|
@ -497,7 +497,7 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
|
||||||
param_names
|
param_names
|
||||||
.iter()
|
.iter()
|
||||||
.take(num_params_to_take)
|
.take(num_params_to_take)
|
||||||
.map(|p| (*p).clone())
|
.map(|p| p.as_str())
|
||||||
.collect::<Vec<_>>()
|
.collect::<Vec<_>>()
|
||||||
.join(", ")
|
.join(", ")
|
||||||
} else {
|
} else {
|
||||||
|
|