
Auto merge of #91957 - nnethercote:rm-SymbolStr, r=oli-obk

Remove `SymbolStr`

This was originally proposed in https://github.com/rust-lang/rust/pull/74554#discussion_r466203544. As well as removing the icky `SymbolStr` type, it allows the removal of a lot of `&` and `*` occurrences.

Best reviewed one commit at a time.

r? `@oli-obk`
Committed by bors on 2021-12-19 09:31:37 +00:00 (commit a41a6925ba)
140 changed files with 354 additions and 415 deletions
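
To make the call-site effect concrete, here is a minimal, self-contained sketch of the old and new APIs (`Symbol` and `SymbolStr` below are toy stand-ins, not the real `rustc_span` definitions). The old `SymbolStr` only exposed the string through `Deref`, so callers wrote `&*sym.as_str()` or `&sym.as_str()`; the new API returns `&str` directly:

```rust
use std::ops::Deref;

// Toy stand-in for rustc's interned `Symbol`.
#[derive(Copy, Clone)]
struct Symbol(&'static str);

// Toy stand-in for the removed `SymbolStr` wrapper: usable as a `str`
// only through `Deref`.
struct SymbolStr {
    string: &'static str,
}

impl Deref for SymbolStr {
    type Target = str;
    fn deref(&self) -> &str {
        self.string
    }
}

impl Symbol {
    // Old shape: callers needed `&*sym.old_as_str()` to get a `&str`.
    fn old_as_str(self) -> SymbolStr {
        SymbolStr { string: self.0 }
    }

    // New shape: `sym.as_str()` can be passed straight to `&str` parameters.
    fn as_str(&self) -> &str {
        self.0
    }
}

fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    let sym = Symbol("example");
    assert_eq!(takes_str(&*sym.old_as_str()), takes_str(sym.as_str()));
}
```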

View file

@ -35,12 +35,12 @@ impl LitKind {
LitKind::Bool(symbol == kw::True)
}
token::Byte => {
return unescape_byte(&symbol.as_str())
return unescape_byte(symbol.as_str())
.map(LitKind::Byte)
.map_err(|_| LitError::LexerError);
}
token::Char => {
return unescape_char(&symbol.as_str())
return unescape_char(symbol.as_str())
.map(LitKind::Char)
.map_err(|_| LitError::LexerError);
}

View file

@ -1278,7 +1278,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
pub(super) fn lower_abi(&mut self, abi: StrLit) -> abi::Abi {
abi::lookup(&abi.symbol_unescaped.as_str()).unwrap_or_else(|| {
abi::lookup(abi.symbol_unescaped.as_str()).unwrap_or_else(|| {
self.error_on_invalid_abi(abi);
abi::Abi::Rust
})

View file

@ -580,8 +580,7 @@ impl<'a> AstValidator<'a> {
/// An item in `extern { ... }` cannot use non-ascii identifier.
fn check_foreign_item_ascii_only(&self, ident: Ident) {
let symbol_str = ident.as_str();
if !symbol_str.is_ascii() {
if !ident.as_str().is_ascii() {
let n = 83942;
self.err_handler()
.struct_span_err(

View file

@ -61,7 +61,7 @@ impl<'a> PostExpansionVisitor<'a> {
fn check_abi(&self, abi: ast::StrLit) {
let ast::StrLit { symbol_unescaped, span, .. } = abi;
match &*symbol_unescaped.as_str() {
match symbol_unescaped.as_str() {
// Stable
"Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64"
| "system" => {}

View file

@ -204,7 +204,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
};
if let Some(suffix) = suffix {
out.push_str(&suffix.as_str())
out.push_str(suffix.as_str())
}
out
@ -384,7 +384,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
fn print_symbol(&mut self, sym: Symbol, style: ast::StrStyle) {
self.print_string(&sym.as_str(), style);
self.print_string(sym.as_str(), style);
}
fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) {

View file

@ -236,7 +236,7 @@ where
// These unwraps are safe because `get` ensures the meta item
// is a name/value pair string literal.
issue_num = match &*issue.unwrap().as_str() {
issue_num = match issue.unwrap().as_str() {
"none" => None,
issue => {
let emit_diag = |msg: &str| {
@ -301,7 +301,7 @@ where
match (feature, reason, issue) {
(Some(feature), reason, Some(_)) => {
if !rustc_lexer::is_ident(&feature.as_str()) {
if !rustc_lexer::is_ident(feature.as_str()) {
handle_errors(
&sess.parse_sess,
attr.span,
@ -535,7 +535,7 @@ pub fn eval_condition(
return false;
}
};
let min_version = match parse_version(&min_version.as_str(), false) {
let min_version = match parse_version(min_version.as_str(), false) {
Some(ver) => ver,
None => {
sess.span_diagnostic

View file

@ -416,7 +416,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
tcx,
generics,
&mut err,
&param.name.as_str(),
param.name.as_str(),
"Copy",
None,
);

View file

@ -206,7 +206,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
{
let local_info = &self.body.local_decls[local].local_info;
if let Some(box LocalInfo::StaticRef { def_id, .. }) = *local_info {
buf.push_str(&self.infcx.tcx.item_name(def_id).as_str());
buf.push_str(self.infcx.tcx.item_name(def_id).as_str());
} else {
unreachable!();
}
@ -318,7 +318,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let decl = &self.body.local_decls[local];
match self.local_names[local] {
Some(name) if !decl.from_compiler_desugaring() => {
buf.push_str(&name.as_str());
buf.push_str(name.as_str());
Ok(())
}
_ => Err(()),

View file

@ -573,7 +573,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
template_snippet.as_ref().map(|s| Symbol::intern(s)),
template_sp,
));
let template_str = &template_str.as_str();
let template_str = template_str.as_str();
if let Some(InlineAsmArch::X86 | InlineAsmArch::X86_64) = ecx.sess.asm_arch {
let find_span = |needle: &str| -> Span {

View file

@ -21,7 +21,7 @@ pub fn expand_concat(
match e.kind {
ast::ExprKind::Lit(ref lit) => match lit.kind {
ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) => {
accumulator.push_str(&s.as_str());
accumulator.push_str(s.as_str());
}
ast::LitKind::Char(c) => {
accumulator.push(c);

View file

@ -29,7 +29,7 @@ pub fn expand_concat_idents<'cx>(
} else {
if let TokenTree::Token(token) = e {
if let Some((ident, _)) = token.ident() {
res_str.push_str(&ident.name.as_str());
res_str.push_str(ident.name.as_str());
continue;
}
}

View file

@ -121,7 +121,7 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
let help_msg = match lit.token.kind {
token::Str if rustc_lexer::is_ident(&lit.token.symbol.as_str()) => {
token::Str if rustc_lexer::is_ident(lit.token.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token.symbol)
}
_ => "for example, write `#[derive(Debug)]` for `Debug`".to_string(),

View file

@ -80,11 +80,11 @@ pub fn expand_env<'cx>(
}
let sp = cx.with_def_site_ctxt(sp);
let value = env::var(&*var.as_str()).ok().as_deref().map(Symbol::intern);
let value = env::var(var.as_str()).ok().as_deref().map(Symbol::intern);
cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value));
let e = match value {
None => {
cx.span_err(sp, &msg.as_str());
cx.span_err(sp, msg.as_str());
return DummyResult::any(sp);
}
Some(value) => cx.expr_str(sp, value),

View file

@ -955,7 +955,7 @@ pub fn expand_preparsed_format_args(
ast::StrStyle::Raw(raw) => Some(raw as usize),
};
let fmt_str = &fmt_str.as_str(); // for the suggestions below
let fmt_str = fmt_str.as_str(); // for the suggestions below
let fmt_snippet = ecx.source_map().span_to_snippet(fmt_sp).ok();
let mut parser = parse::Parser::new(
fmt_str,

View file

@ -369,7 +369,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
TodoItem::Static(def_id) => {
//println!("static {:?}", def_id);
let section_name = tcx.codegen_fn_attrs(def_id).link_section.map(|s| s.as_str());
let section_name = tcx.codegen_fn_attrs(def_id).link_section;
let alloc = tcx.eval_static_initializer(def_id).unwrap();
@ -388,6 +388,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
if let Some(section_name) = section_name {
let (segment_name, section_name) = if tcx.sess.target.is_like_osx {
let section_name = section_name.as_str();
if let Some(names) = section_name.split_once(',') {
names
} else {
@ -397,7 +398,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
));
}
} else {
("", &*section_name)
("", section_name.as_str())
};
data_ctx.set_segment_section(segment_name, section_name);
}

View file

@ -84,7 +84,7 @@ fn reuse_workproduct_for_cgu(
let work_product = cgu.work_product(tcx);
if let Some(saved_file) = &work_product.saved_file {
let obj_out =
tcx.output_filenames(()).temp_path(OutputType::Object, Some(&cgu.name().as_str()));
tcx.output_filenames(()).temp_path(OutputType::Object, Some(cgu.name().as_str()));
object = Some(obj_out.clone());
let source_file = rustc_incremental::in_incr_comp_dir_sess(&tcx.sess, &saved_file);
if let Err(err) = rustc_fs_util::link_or_copy(&source_file, &obj_out) {
@ -176,7 +176,7 @@ fn module_codegen(
)
});
codegen_global_asm(tcx, &cgu.name().as_str(), &cx.global_asm);
codegen_global_asm(tcx, cgu.name().as_str(), &cx.global_asm);
codegen_result
}
@ -207,7 +207,7 @@ pub(crate) fn run_aot(
cgus.iter()
.map(|cgu| {
let cgu_reuse = determine_cgu_reuse(tcx, cgu);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(&cgu.name().as_str(), cgu_reuse);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse);
match cgu_reuse {
_ if backend_config.disable_incr_cache => {}

View file

@ -33,7 +33,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
return value;
}
let global = self.global_string(&*symbol.as_str());
let global = self.global_string(symbol.as_str());
self.const_cstr_cache.borrow_mut().insert(symbol, global);
global

View file

@ -17,7 +17,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
global.set_tls_model(self.tls_model);
}
if let Some(link_section) = link_section {
global.set_link_section(&link_section.as_str());
global.set_link_section(link_section.as_str());
}
global
}
@ -53,7 +53,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
global.set_tls_model(self.tls_model);
}
if let Some(link_section) = link_section {
global.set_link_section(&link_section.as_str());
global.set_link_section(link_section.as_str());
}
let global_address = global.get_address(None);
self.globals.borrow_mut().insert(name.to_string(), global_address);

View file

@ -88,7 +88,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
let arg_tys = sig.inputs();
let ret_ty = sig.output();
let name = tcx.item_name(def_id);
let name_str = &*name.as_str();
let name_str = name.as_str();
let llret_ty = self.layout_of(ret_ty).gcc_type(self, true);
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);

View file

@ -52,7 +52,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
let sig =
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), callee_ty.fn_sig(tcx));
let arg_tys = sig.inputs();
let name_str = &*name.as_str();
let name_str = name.as_str();
// every intrinsic below takes a SIMD vector as its first argument
require_simd!(arg_tys[0], "input");

View file

@ -326,7 +326,7 @@ pub fn from_fn_attrs<'ll, 'tcx>(
.target_features
.iter()
.flat_map(|f| {
let feature = &f.as_str();
let feature = f.as_str();
llvm_util::to_llvm_feature(cx.tcx.sess, feature)
.into_iter()
.map(|f| format!("+{}", f))
@ -351,7 +351,7 @@ pub fn from_fn_attrs<'ll, 'tcx>(
let name =
codegen_fn_attrs.link_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id()));
let name = CString::new(&name.as_str()[..]).unwrap();
let name = CString::new(name.as_str()).unwrap();
llvm::AddFunctionAttrStringValue(
llfn,
llvm::AttributePlace::Function,

View file

@ -79,7 +79,7 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen
&[cgu_name.to_string(), cgu.size_estimate().to_string()],
);
// Instantiate monomorphizations without filling out definitions yet...
let llvm_module = ModuleLlvm::new(tcx, &cgu_name.as_str());
let llvm_module = ModuleLlvm::new(tcx, cgu_name.as_str());
{
let cx = CodegenCx::new(tcx, cgu, &llvm_module);
let mono_items = cx.codegen_unit.items_in_deterministic_order(cx.tcx);
@ -143,7 +143,7 @@ pub fn set_link_section(llval: &Value, attrs: &CodegenFnAttrs) {
None => return,
};
unsafe {
let buf = SmallCStr::new(&sect.as_str());
let buf = SmallCStr::new(sect.as_str());
llvm::LLVMSetSection(llval, buf.as_ptr());
}
}

View file

@ -320,7 +320,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
let dbg_cx = if tcx.sess.opts.debuginfo != DebugInfo::None {
let dctx = debuginfo::CrateDebugContext::new(llmod);
debuginfo::metadata::compile_unit_metadata(tcx, &codegen_unit.name().as_str(), &dctx);
debuginfo::metadata::compile_unit_metadata(tcx, codegen_unit.name().as_str(), &dctx);
Some(dctx)
} else {
None

View file

@ -1037,7 +1037,7 @@ pub fn compile_unit_metadata<'ll, 'tcx>(
) -> &'ll DIDescriptor {
let mut name_in_debuginfo = match tcx.sess.local_crate_source_file {
Some(ref path) => path.clone(),
None => PathBuf::from(&*tcx.crate_name(LOCAL_CRATE).as_str()),
None => PathBuf::from(tcx.crate_name(LOCAL_CRATE).as_str()),
};
// To avoid breaking split DWARF, we need to ensure that each codegen unit
@ -1371,7 +1371,7 @@ fn closure_saved_names_of_captured_variables(tcx: TyCtxt<'_>, def_id: DefId) ->
_ => return None,
};
let prefix = if is_ref { "_ref__" } else { "" };
Some(prefix.to_owned() + &var.name.as_str())
Some(prefix.to_owned() + var.name.as_str())
})
.collect::<Vec<_>>()
}
@ -1949,7 +1949,7 @@ enum VariantInfo<'a, 'tcx> {
impl<'tcx> VariantInfo<'_, 'tcx> {
fn map_struct_name<R>(&self, f: impl FnOnce(&str) -> R) -> R {
match self {
VariantInfo::Adt(variant) => f(&variant.ident.as_str()),
VariantInfo::Adt(variant) => f(variant.ident.as_str()),
VariantInfo::Generator { variant_index, .. } => {
f(&GeneratorSubsts::variant_name(*variant_index))
}
@ -2114,8 +2114,8 @@ fn prepare_enum_metadata<'ll, 'tcx>(
let item_name;
let discriminant_name = match enum_type.kind() {
ty::Adt(..) => {
item_name = tcx.item_name(enum_def_id).as_str();
&*item_name
item_name = tcx.item_name(enum_def_id);
item_name.as_str()
}
ty::Generator(..) => enum_name.as_str(),
_ => bug!(),
@ -2448,7 +2448,7 @@ fn compute_type_parameters<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -
cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty);
let actual_type_metadata =
type_metadata(cx, actual_type, rustc_span::DUMMY_SP);
let name = &name.as_str();
let name = name.as_str();
Some(unsafe {
Some(llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -2590,7 +2590,8 @@ pub fn create_global_var_metadata<'ll>(cx: &CodegenCx<'ll, '_>, def_id: DefId, g
let is_local_to_unit = is_node_local_to_unit(cx, def_id);
let variable_type = Instance::mono(cx.tcx, def_id).ty(cx.tcx, ty::ParamEnv::reveal_all());
let type_metadata = type_metadata(cx, variable_type, span);
let var_name = tcx.item_name(def_id).as_str();
let var_name = tcx.item_name(def_id);
let var_name = var_name.as_str();
let linkage_name = mangled_name_of_instance(cx, Instance::mono(tcx, def_id)).name;
// When empty, linkage_name field is omitted,
// which is what we want for no_mangle statics

View file

@ -48,7 +48,7 @@ impl Command {
}
pub fn sym_arg(&mut self, arg: Symbol) -> &mut Command {
self.arg(&*arg.as_str());
self.arg(arg.as_str());
self
}

View file

@ -88,7 +88,7 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
sess,
crate_type,
outputs,
&codegen_results.crate_info.local_crate_name.as_str(),
codegen_results.crate_info.local_crate_name.as_str(),
);
match crate_type {
CrateType::Rlib => {

View file

@ -672,7 +672,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
}
let cgu_reuse = cgu_reuse[i];
tcx.sess.cgu_reuse_tracker.set_actual_reuse(&cgu.name().as_str(), cgu_reuse);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse);
match cgu_reuse {
CguReuse::No => {

View file

@ -516,7 +516,7 @@ fn push_unqualified_item_name(
) {
match disambiguated_data.data {
DefPathData::CrateRoot => {
output.push_str(&tcx.crate_name(def_id.krate).as_str());
output.push_str(tcx.crate_name(def_id.krate).as_str());
}
DefPathData::ClosureExpr if tcx.generator_kind(def_id).is_some() => {
// Generators look like closures, but we want to treat them differently
@ -529,7 +529,7 @@ fn push_unqualified_item_name(
}
_ => match disambiguated_data.data.name() {
DefPathDataName::Named(name) => {
output.push_str(&name.as_str());
output.push_str(name.as_str());
}
DefPathDataName::Anon { namespace } => {
if cpp_like_names(tcx) {

View file

@ -68,7 +68,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let arg_tys = sig.inputs();
let ret_ty = sig.output();
let name = bx.tcx().item_name(def_id);
let name_str = &*name.as_str();
let name_str = name.as_str();
let llret_ty = bx.backend_type(bx.layout_of(ret_ty));
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
@ -375,7 +375,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
use crate::common::AtomicOrdering::*;
use crate::common::{AtomicRmwBinOp, SynchronizationScope};
let split: Vec<&str> = name_str.split('_').collect();
let split: Vec<_> = name_str.split('_').collect();
let is_cxchg = split[1] == "cxchg" || split[1] == "cxchgweak";
let (order, failorder) = match split.len() {

View file

@ -82,7 +82,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> MPlaceTy<'tcx, M::PointerTag> {
let loc_details = &self.tcx.sess.opts.debugging_opts.location_detail;
let file = if loc_details.file {
self.allocate_str(&filename.as_str(), MemoryKind::CallerLocation, Mutability::Not)
self.allocate_str(filename.as_str(), MemoryKind::CallerLocation, Mutability::Not)
} else {
// FIXME: This creates a new allocation each time. It might be preferable to
// perform this allocation only once, and re-use the `MPlaceTy`.

View file

@ -88,7 +88,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
}
fn path_crate(mut self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
self.path.push_str(&self.tcx.crate_name(cnum).as_str());
self.path.push_str(self.tcx.crate_name(cnum).as_str());
Ok(self)
}

View file

@ -171,7 +171,7 @@ fn get_features(
}
if let Some(allowed) = sess.opts.debugging_opts.allow_features.as_ref() {
if allowed.iter().all(|f| name.as_str() != *f) {
if allowed.iter().all(|f| name.as_str() != f) {
struct_span_err!(
span_handler,
mi.span(),

View file

@ -103,10 +103,10 @@ crate fn mod_dir_path(
if let DirOwnership::Owned { relative } = &mut dir_ownership {
if let Some(ident) = relative.take() {
// Remove the relative offset.
dir_path.push(&*ident.as_str());
dir_path.push(ident.as_str());
}
}
dir_path.push(&*ident.as_str());
dir_path.push(ident.as_str());
(dir_path, dir_ownership)
}
@ -170,8 +170,8 @@ fn mod_file_path_from_attr(
) -> Option<PathBuf> {
// Extract path string from first `#[path = "path_string"]` attribute.
let first_path = attrs.iter().find(|at| at.has_name(sym::path))?;
let path_string = match first_path.value_str() {
Some(s) => s.as_str(),
let path_sym = match first_path.value_str() {
Some(s) => s,
None => {
// This check is here mainly to catch attempting to use a macro,
// such as #[path = concat!(...)]. This isn't currently supported
@ -189,14 +189,16 @@ fn mod_file_path_from_attr(
}
};
let path_str = path_sym.as_str();
// On windows, the base path might have the form
// `\\?\foo\bar` in which case it does not tolerate
// mixed `/` and `\` separators, so canonicalize
// `/` to `\`.
#[cfg(windows)]
let path_string = path_string.replace("/", "\\");
let path_str = path_str.replace("/", "\\");
Some(dir_path.join(&*path_string))
Some(dir_path.join(path_str))
}
/// Returns a path to a module.

View file

@ -331,9 +331,9 @@ pub struct Ident {
impl Ident {
fn new(sess: &ParseSess, sym: Symbol, is_raw: bool, span: Span) -> Ident {
let sym = nfc_normalize(&sym.as_str());
let sym = nfc_normalize(sym.as_str());
let string = sym.as_str();
if !rustc_lexer::is_ident(&string) {
if !rustc_lexer::is_ident(string) {
panic!("`{:?}` is not a valid identifier", string)
}
if is_raw && !sym.can_be_raw() {

View file

@ -173,7 +173,7 @@ impl DisambiguatedDefPathData {
if verbose && self.disambiguator != 0 {
write!(writer, "{}#{}", name, self.disambiguator)
} else {
writer.write_str(&name.as_str())
writer.write_str(name.as_str())
}
}
DefPathDataName::Anon { namespace } => {
@ -499,7 +499,7 @@ impl DefPathData {
impl fmt::Display for DefPathData {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.name() {
DefPathDataName::Named(name) => f.write_str(&name.as_str()),
DefPathDataName::Named(name) => f.write_str(name.as_str()),
// FIXME(#70334): this will generate legacy {{closure}}, {{impl}}, etc
DefPathDataName::Anon { namespace } => write!(f, "{{{{{}}}}}", namespace),
}

View file

@ -131,7 +131,7 @@ impl<'tcx> IfThisChanged<'tcx> {
DepNode::from_def_path_hash(self.tcx, def_path_hash, DepKind::hir_owner)
}
Some(n) => {
match DepNode::from_label_string(self.tcx, &n.as_str(), def_path_hash) {
match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) {
Ok(n) => n,
Err(()) => {
self.tcx.sess.span_fatal(
@ -147,7 +147,7 @@ impl<'tcx> IfThisChanged<'tcx> {
let dep_node_interned = self.argument(attr);
let dep_node = match dep_node_interned {
Some(n) => {
match DepNode::from_label_string(self.tcx, &n.as_str(), def_path_hash) {
match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) {
Ok(n) => n,
Err(()) => {
self.tcx.sess.span_fatal(

View file

@ -124,7 +124,7 @@ impl<'tcx> AssertModuleSource<'tcx> {
debug!("mapping '{}' to cgu name '{}'", self.field(attr, sym::module), cgu_name);
if !self.available_cgus.contains(&*cgu_name.as_str()) {
if !self.available_cgus.contains(cgu_name.as_str()) {
self.tcx.sess.span_err(
attr.span,
&format!(

View file

@ -2252,8 +2252,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
.map(|p| p.name.as_str()),
);
}
let lts = lts_names.iter().map(|s| -> &str { &*s }).collect::<Vec<_>>();
possible.find(|candidate| !lts.contains(&candidate.as_str()))
possible.find(|candidate| !lts_names.contains(&&candidate[..]))
})
.unwrap_or("'lt".to_string());
let add_lt_sugg = generics

View file

@ -324,7 +324,7 @@ pub fn configure_and_expand(
let crate_attrs = krate.attrs.clone();
let extern_mod_loaded = |ident: Ident, attrs, items, span| {
let krate = ast::Crate { attrs, items, span, is_placeholder: None };
pre_expansion_lint(sess, lint_store, &krate, &crate_attrs, &ident.name.as_str());
pre_expansion_lint(sess, lint_store, &krate, &crate_attrs, ident.name.as_str());
(krate.attrs, krate.items)
};
let mut ecx = ExtCtxt::new(sess, cfg, resolver, Some(&extern_mod_loaded));
@ -631,7 +631,7 @@ fn write_out_deps(
// (e.g. accessed in proc macros).
let file_depinfo = sess.parse_sess.file_depinfo.borrow();
let extra_tracked_files = file_depinfo.iter().map(|path_sym| {
let path = PathBuf::from(&*path_sym.as_str());
let path = PathBuf::from(path_sym.as_str());
let file = FileName::from(path);
escape_dep_filename(&file.prefer_local().to_string())
});
@ -1049,8 +1049,8 @@ fn encode_and_write_metadata(
let need_metadata_file = tcx.sess.opts.output_types.contains_key(&OutputType::Metadata);
if need_metadata_file {
let crate_name = &tcx.crate_name(LOCAL_CRATE).as_str();
let out_filename = filename_for_metadata(tcx.sess, crate_name, outputs);
let crate_name = tcx.crate_name(LOCAL_CRATE);
let out_filename = filename_for_metadata(tcx.sess, crate_name.as_str(), outputs);
// To avoid races with another rustc process scanning the output directory,
// we need to write the file somewhere else and atomically move it to its
// final destination, with an `fs::rename` call. In order for the rename to

View file

@ -3181,7 +3181,7 @@ impl<'tcx> LateLintPass<'tcx> for NamedAsmLabels {
} = expr
{
for (template_sym, template_snippet, template_span) in template_strs.iter() {
let template_str = &template_sym.as_str();
let template_str = template_sym.as_str();
let find_label_span = |needle: &str| -> Option<Span> {
if let Some(template_snippet) = template_snippet {
let snippet = template_snippet.as_str();

View file

@ -381,10 +381,10 @@ impl LintStore {
lint_name,
self.lint_groups.keys().collect::<Vec<_>>()
);
let lint_name_str = &*lint_name.as_str();
self.lint_groups.contains_key(&lint_name_str) || {
let lint_name_str = lint_name.as_str();
self.lint_groups.contains_key(lint_name_str) || {
let warnings_name_str = crate::WARNINGS.name_lower();
lint_name_str == &*warnings_name_str
lint_name_str == warnings_name_str
}
}

View file

@ -127,7 +127,7 @@ impl HiddenUnicodeCodepoints {
impl EarlyLintPass for HiddenUnicodeCodepoints {
fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &ast::Attribute) {
if let ast::AttrKind::DocComment(_, comment) = attr.kind {
if contains_text_flow_control_chars(&comment.as_str()) {
if contains_text_flow_control_chars(comment.as_str()) {
self.lint_text_direction_codepoint(cx, comment, attr.span, 0, false, "doc comment");
}
}
@ -138,7 +138,7 @@ impl EarlyLintPass for HiddenUnicodeCodepoints {
let (text, span, padding) = match &expr.kind {
ast::ExprKind::Lit(ast::Lit { token, kind, span }) => {
let text = token.symbol;
if !contains_text_flow_control_chars(&text.as_str()) {
if !contains_text_flow_control_chars(text.as_str()) {
return;
}
let padding = match kind {

View file

@ -154,7 +154,7 @@ impl<'s> LintLevelsBuilder<'s> {
LintLevelSource::Node(_, forbid_source_span, reason) => {
diag_builder.span_label(forbid_source_span, "`forbid` level set here");
if let Some(rationale) = reason {
diag_builder.note(&rationale.as_str());
diag_builder.note(rationale.as_str());
}
}
LintLevelSource::CommandLine(_, _) => {

View file

@ -218,8 +218,7 @@ impl EarlyLintPass for NonAsciiIdents {
cx.struct_span_lint(CONFUSABLE_IDENTS, sp, |lint| {
lint.build(&format!(
"identifier pair considered confusable between `{}` and `{}`",
existing_symbol.as_str(),
symbol.as_str()
existing_symbol, symbol
))
.span_label(
*existing_span,

View file

@ -9,7 +9,7 @@ use rustc_middle::ty::subst::InternalSubsts;
use rustc_parse_format::{ParseMode, Parser, Piece};
use rustc_session::lint::FutureIncompatibilityReason;
use rustc_span::edition::Edition;
use rustc_span::{hygiene, sym, symbol::kw, symbol::SymbolStr, InnerSpan, Span, Symbol};
use rustc_span::{hygiene, sym, symbol::kw, InnerSpan, Span, Symbol};
use rustc_trait_selection::infer::InferCtxtExt;
declare_lint! {
@ -71,14 +71,14 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
if let hir::ExprKind::Lit(lit) = &arg.kind {
if let ast::LitKind::Str(sym, _) = lit.node {
// The argument is a string literal.
check_panic_str(cx, f, arg, &sym.as_str());
check_panic_str(cx, f, arg, sym.as_str());
return;
}
}
// The argument is *not* a string literal.
let (span, panic, symbol_str) = panic_call(cx, f);
let (span, panic, symbol) = panic_call(cx, f);
if in_external_macro(cx.sess(), span) {
// Nothing that can be done about it in the current crate.
@ -103,7 +103,7 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
cx.struct_span_lint(NON_FMT_PANICS, arg_span, |lint| {
let mut l = lint.build("panic message is not a string literal");
l.note(&format!("this usage of {}!() is deprecated; it will be a hard error in Rust 2021", symbol_str));
l.note(&format!("this usage of {}!() is deprecated; it will be a hard error in Rust 2021", symbol));
l.note("for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2021/panic-macro-consistency.html>");
if !is_arg_inside_call(arg_span, span) {
// No clue where this argument is coming from.
@ -112,7 +112,7 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
}
if arg_macro.map_or(false, |id| cx.tcx.is_diagnostic_item(sym::format_macro, id)) {
// A case of `panic!(format!(..))`.
l.note(format!("the {}!() macro supports formatting, so there's no need for the format!() macro here", symbol_str).as_str());
l.note(format!("the {}!() macro supports formatting, so there's no need for the format!() macro here", symbol).as_str());
if let Some((open, close, _)) = find_delimiters(cx, arg_span) {
l.multipart_suggestion(
"remove the `format!(..)` macro call",
@ -301,7 +301,7 @@ fn find_delimiters<'tcx>(cx: &LateContext<'tcx>, span: Span) -> Option<(Span, Sp
))
}
fn panic_call<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>) -> (Span, Symbol, SymbolStr) {
fn panic_call<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>) -> (Span, Symbol, Symbol) {
let mut expn = f.span.ctxt().outer_expn_data();
let mut panic_macro = kw::Empty;
@ -328,7 +328,7 @@ fn panic_call<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>) -> (Span,
let macro_symbol =
if let hygiene::ExpnKind::Macro(_, symbol) = expn.kind { symbol } else { sym::panic };
(expn.call_site, panic_macro, macro_symbol.as_str())
(expn.call_site, panic_macro, macro_symbol)
}
fn is_arg_inside_call(arg: Span, call: Span) -> bool {

View file

@ -133,7 +133,7 @@ fn to_camel_case(s: &str) -> String {
impl NonCamelCaseTypes {
fn check_case(&self, cx: &EarlyContext<'_>, sort: &str, ident: &Ident) {
let name = &ident.name.as_str();
let name = ident.name.as_str();
if !is_camel_case(name) {
cx.struct_span_lint(NON_CAMEL_CASE_TYPES, ident.span, |lint| {
@ -276,7 +276,7 @@ impl NonSnakeCase {
})
}
let name = &ident.name.as_str();
let name = ident.name.as_str();
if !is_snake_case(name) {
cx.struct_span_lint(NON_SNAKE_CASE, ident.span, |lint| {
@ -484,7 +484,7 @@ declare_lint_pass!(NonUpperCaseGlobals => [NON_UPPER_CASE_GLOBALS]);
impl NonUpperCaseGlobals {
fn check_upper_case(cx: &LateContext<'_>, sort: &str, ident: &Ident) {
let name = &ident.name.as_str();
let name = ident.name.as_str();
if name.chars().any(|c| c.is_lowercase()) {
cx.struct_span_lint(NON_UPPER_CASE_GLOBALS, ident.span, |lint| {
let uc = NonSnakeCase::to_snake_case(&name).to_uppercase();

View file

@ -315,7 +315,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
let mut err = lint.build(&msg);
// check for #[must_use = "..."]
if let Some(note) = attr.value_str() {
err.note(&note.as_str());
err.note(note.as_str());
}
err.emit();
});

View file

@ -292,7 +292,7 @@ impl<'a> CrateLoader<'a> {
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = self.cstore.get_crate_data(cnum).cdata.source();
if let Some(entry) = self.sess.opts.externs.get(&name.as_str()) {
if let Some(entry) = self.sess.opts.externs.get(name.as_str()) {
// Only use `--extern crate_name=path` here, not `--extern crate_name`.
if let Some(mut files) = entry.files() {
if files.any(|l| {
@ -381,7 +381,7 @@ impl<'a> CrateLoader<'a> {
let host_hash = host_lib.as_ref().map(|lib| lib.metadata.get_root().hash());
let private_dep =
self.sess.opts.externs.get(&name.as_str()).map_or(false, |e| e.is_private_dep);
self.sess.opts.externs.get(name.as_str()).map_or(false, |e| e.is_private_dep);
// Claim this crate number and cache it
let cnum = self.cstore.alloc_new_crate_num();
@ -997,7 +997,7 @@ impl<'a> CrateLoader<'a> {
);
let name = match orig_name {
Some(orig_name) => {
validate_crate_name(self.sess, &orig_name.as_str(), Some(item.span));
validate_crate_name(self.sess, orig_name.as_str(), Some(item.span));
orig_name
}
None => item.ident.name,

View file

@ -315,7 +315,7 @@ impl<'a> CrateLocator<'a> {
exact_paths: if hash.is_none() {
sess.opts
.externs
.get(&crate_name.as_str())
.get(crate_name.as_str())
.into_iter()
.filter_map(|entry| entry.files())
.flatten()
@ -976,7 +976,8 @@ impl CrateError {
let candidates = libraries
.iter()
.map(|lib| {
let crate_name = &lib.metadata.get_root().name().as_str();
let crate_name = lib.metadata.get_root().name();
let crate_name = crate_name.as_str();
let mut paths = lib.source.paths();
// This `unwrap()` should be okay because there has to be at least one
@ -1174,7 +1175,7 @@ impl CrateError {
} else if crate_name
== Symbol::intern(&sess.opts.debugging_opts.profiler_runtime)
{
err.note(&"the compiler may have been built without the profiler runtime");
err.note("the compiler may have been built without the profiler runtime");
} else if crate_name.as_str().starts_with("rustc_") {
err.help(
"maybe you need to install the missing components with: \

View file

@ -67,7 +67,7 @@ impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> {
Some(name) => name,
None => continue, // skip like historical compilers
};
lib.kind = match &*kind.as_str() {
lib.kind = match kind.as_str() {
"static" => NativeLibKind::Static { bundle: None, whole_archive: None },
"static-nobundle" => {
sess.struct_span_warn(

View file

@ -319,7 +319,7 @@ pub fn struct_lint_level<'s, 'd>(
}
LintLevelSource::Node(lint_attr_name, src, reason) => {
if let Some(rationale) = reason {
err.note(&rationale.as_str());
err.note(rationale.as_str());
}
sess.diag_span_note_once(
&mut err,

View file

@ -21,7 +21,7 @@ pub mod lib_features {
.map(|(f, s)| (*f, Some(*s)))
.chain(self.unstable.iter().map(|f| (*f, None)))
.collect();
all_features.sort_unstable_by_key(|f| f.0.as_str());
all_features.sort_unstable_by(|a, b| a.0.as_str().partial_cmp(b.0.as_str()).unwrap());
all_features
}
}

View file

@ -131,8 +131,7 @@ pub fn report_unstable(
/// deprecated (i.e., whether X is not greater than the current rustc version).
pub fn deprecation_in_effect(depr: &Deprecation) -> bool {
let is_since_rustc_version = depr.is_since_rustc_version;
let since = depr.since.map(Symbol::as_str);
let since = since.as_deref();
let since = depr.since.as_ref().map(Symbol::as_str);
fn parse_version(ver: &str) -> Vec<u32> {
// We ignore non-integer components of the version (e.g., "nightly").
@ -197,7 +196,7 @@ fn deprecation_message(
let message = if is_in_effect {
format!("use of deprecated {} `{}`", kind, path)
} else {
let since = since.map(Symbol::as_str);
let since = since.as_ref().map(Symbol::as_str);
if since.as_deref() == Some("TBD") {
format!("use of {} `{}` that will be deprecated in a future Rust version", kind, path)

View file

@ -2444,7 +2444,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
CtorKind::Fictive => {
let mut struct_fmt = fmt.debug_struct(&name);
for (field, place) in iter::zip(&variant_def.fields, places) {
struct_fmt.field(&field.ident.as_str(), place);
struct_fmt.field(field.ident.as_str(), place);
}
struct_fmt.finish()
}
@ -2473,7 +2473,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
struct_fmt.field(var_name.as_str(), place);
}
}
@ -2493,7 +2493,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
struct_fmt.field(var_name.as_str(), place);
}
}

View file

@ -338,7 +338,7 @@ impl<'tcx> CodegenUnit<'tcx> {
}
pub fn work_product_id(&self) -> WorkProductId {
WorkProductId::from_cgu_name(&self.name().as_str())
WorkProductId::from_cgu_name(self.name().as_str())
}
pub fn work_product(&self, tcx: TyCtxt<'_>) -> WorkProduct {
@ -470,7 +470,7 @@ impl CodegenUnitNameBuilder<'tcx> {
if self.tcx.sess.opts.debugging_opts.human_readable_cgu_names {
cgu_name
} else {
Symbol::intern(&CodegenUnit::mangle_name(&cgu_name.as_str()))
Symbol::intern(&CodegenUnit::mangle_name(cgu_name.as_str()))
}
}

View file

@ -1216,8 +1216,8 @@ impl<'tcx> TyCtxt<'tcx> {
}
pub fn consider_optimizing<T: Fn() -> String>(self, msg: T) -> bool {
let cname = self.crate_name(LOCAL_CRATE).as_str();
self.sess.consider_optimizing(&cname, msg)
let cname = self.crate_name(LOCAL_CRATE);
self.sess.consider_optimizing(cname.as_str(), msg)
}
/// Obtain all lang items of this crate and all dependencies (recursively)

View file

@ -303,7 +303,7 @@ pub trait PrettyPrinter<'tcx>:
match self.tcx().trimmed_def_paths(()).get(&def_id) {
None => Ok((self, false)),
Some(symbol) => {
self.write_str(&symbol.as_str())?;
self.write_str(symbol.as_str())?;
Ok((self, true))
}
}

View file

@ -3,7 +3,7 @@ use std::cmp;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder};
use rustc_span::symbol::{Symbol, SymbolStr};
use rustc_span::symbol::Symbol;
use super::PartitioningCx;
use crate::partitioning::PreInliningPartitioning;
@ -24,11 +24,11 @@ pub fn merge_codegen_units<'tcx>(
// smallest into each other) we're sure to start off with a deterministic
// order (sorted by name). This'll mean that if two cgus have the same size
// the stable sort below will keep everything nice and deterministic.
codegen_units.sort_by_cached_key(|cgu| cgu.name().as_str());
codegen_units.sort_by(|a, b| a.name().as_str().partial_cmp(b.name().as_str()).unwrap());
// This map keeps track of what got merged into what.
let mut cgu_contents: FxHashMap<Symbol, Vec<SymbolStr>> =
codegen_units.iter().map(|cgu| (cgu.name(), vec![cgu.name().as_str()])).collect();
let mut cgu_contents: FxHashMap<Symbol, Vec<Symbol>> =
codegen_units.iter().map(|cgu| (cgu.name(), vec![cgu.name()])).collect();
// Merge the two smallest codegen units until the target size is reached.
while codegen_units.len() > cx.target_cgu_count {
@ -69,7 +69,7 @@ pub fn merge_codegen_units<'tcx>(
// were actually modified by merging.
.filter(|(_, cgu_contents)| cgu_contents.len() > 1)
.map(|(current_cgu_name, cgu_contents)| {
let mut cgu_contents: Vec<&str> = cgu_contents.iter().map(|s| &s[..]).collect();
let mut cgu_contents: Vec<&str> = cgu_contents.iter().map(|s| s.as_str()).collect();
// Sort the names, so things are deterministic and easy to
// predict.
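
The `sort_by_cached_key` → `sort_by` rewrites in this commit (here and elsewhere in this diff) are presumably forced by the new signature: `as_str(&self) -> &str` borrows from the `Symbol` temporary produced inside the key closure, so the string can no longer be returned as a cached key; comparing in place avoids the borrow. A small sketch of the pattern with made-up types (`Name` and `Unit` are not compiler types):

```rust
// Toy stand-ins: `Name` plays the role of `Symbol`, `Unit` of `CodegenUnit`.
#[derive(Copy, Clone)]
struct Name(&'static str);

impl Name {
    fn as_str(&self) -> &str {
        self.0
    }
}

struct Unit {
    name: Name,
}

impl Unit {
    // Returns the key by value, like `CodegenUnit::name()` returns a `Symbol`.
    fn name(&self) -> Name {
        self.name
    }
}

fn main() {
    let mut units = vec![Unit { name: Name("b") }, Unit { name: Name("a") }];
    // `units.sort_by_cached_key(|u| u.name().as_str())` no longer compiles:
    // the `&str` would borrow from the temporary `Name` inside the closure.
    units.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));
    let first = units[0].name();
    assert_eq!(first.as_str(), "a");
}
```

(The commit itself compares with `partial_cmp(..).unwrap()`; `cmp` is used here only to keep the sketch short.)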

View file

@ -208,7 +208,7 @@ pub fn partition<'tcx>(
internalization_candidates: _,
} = post_inlining;
result.sort_by_cached_key(|cgu| cgu.name().as_str());
result.sort_by(|a, b| a.name().as_str().partial_cmp(b.name().as_str()).unwrap());
result
}
@ -366,7 +366,7 @@ fn collect_and_partition_mono_items<'tcx>(
for cgu in codegen_units {
tcx.prof.artifact_size(
"codegen_unit_size_estimate",
&cgu.name().as_str()[..],
cgu.name().as_str(),
cgu.size_estimate() as u64,
);
}
@ -401,7 +401,7 @@ fn collect_and_partition_mono_items<'tcx>(
cgus.dedup();
for &(ref cgu_name, (linkage, _)) in cgus.iter() {
output.push(' ');
output.push_str(&cgu_name.as_str());
output.push_str(cgu_name.as_str());
let linkage_abbrev = match linkage {
Linkage::External => "External",

View file

@ -1639,7 +1639,7 @@ impl<'a> Parser<'a> {
next_token.kind
{
if self.token.span.hi() == next_token.span.lo() {
let s = String::from("0.") + &symbol.as_str();
let s = String::from("0.") + symbol.as_str();
let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
return Some(Token::new(kind, self.token.span.to(next_token.span)));
}
@ -1710,7 +1710,8 @@ impl<'a> Parser<'a> {
);
}
LitError::InvalidIntSuffix => {
let suf = suffix.expect("suffix error with no suffix").as_str();
let suf = suffix.expect("suffix error with no suffix");
let suf = suf.as_str();
if looks_like_width_suffix(&['i', 'u'], &suf) {
// If it looks like a width, try to be helpful.
let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
@ -1726,8 +1727,9 @@ impl<'a> Parser<'a> {
}
}
LitError::InvalidFloatSuffix => {
let suf = suffix.expect("suffix error with no suffix").as_str();
if looks_like_width_suffix(&['f'], &suf) {
let suf = suffix.expect("suffix error with no suffix");
let suf = suf.as_str();
if looks_like_width_suffix(&['f'], suf) {
// If it looks like a width, try to be helpful.
let msg = format!("invalid width `{}` for float literal", &suf[1..]);
self.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit();

View file

@ -607,7 +607,7 @@ impl CheckAttrVisitor<'_> {
return err_fn(meta.span(), &format!("isn't allowed on {}", err));
}
let item_name = self.tcx.hir().name(hir_id);
if &*item_name.as_str() == doc_alias {
if item_name.as_str() == doc_alias {
return err_fn(meta.span(), "is the same as the item's name");
}
let span = meta.span();
@ -636,7 +636,7 @@ impl CheckAttrVisitor<'_> {
LitKind::Str(s, _) => {
if !self.check_doc_alias_value(
v,
&s.as_str(),
s.as_str(),
hir_id,
target,
true,

View file

@ -1464,7 +1464,7 @@ impl<'tcx> Liveness<'_, 'tcx> {
if name == kw::Empty {
return None;
}
let name: &str = &name.as_str();
let name = name.as_str();
if name.as_bytes()[0] == b'_' {
return None;
}

View file

@ -61,8 +61,8 @@ impl<'p, 'c, 'tcx> QueryKeyStringBuilder<'p, 'c, 'tcx> {
match def_key.disambiguated_data.data {
DefPathData::CrateRoot => {
crate_name = self.tcx.crate_name(def_id.krate).as_str();
name = &*crate_name;
crate_name = self.tcx.crate_name(def_id.krate);
name = crate_name.as_str();
dis = "";
end_index = 3;
}

View file

@ -1,4 +1,3 @@
use std::cmp::Reverse;
use std::ptr;
use rustc_ast::{self as ast, Path};
@ -784,7 +783,7 @@ impl<'a> Resolver<'a> {
});
// Make sure error reporting is deterministic.
suggestions.sort_by_cached_key(|suggestion| suggestion.candidate.as_str());
suggestions.sort_by(|a, b| a.candidate.as_str().partial_cmp(b.candidate.as_str()).unwrap());
match find_best_match_for_name(
&suggestions.iter().map(|suggestion| suggestion.candidate).collect::<Vec<Symbol>>(),
@ -1186,7 +1185,7 @@ impl<'a> Resolver<'a> {
("", " from prelude")
} else if b.is_extern_crate()
&& !b.is_import()
&& self.session.opts.externs.get(&ident.as_str()).is_some()
&& self.session.opts.externs.get(ident.as_str()).is_some()
{
("", " passed with `--extern`")
} else if add_built_in {
@ -1481,12 +1480,12 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
return None;
}
// Sort extern crate names in reverse order to get
// Sort extern crate names in *reverse* order to get
// 1) some consistent ordering for emitted diagnostics, and
// 2) `std` suggestions before `core` suggestions.
let mut extern_crate_names =
self.r.extern_prelude.iter().map(|(ident, _)| ident.name).collect::<Vec<_>>();
extern_crate_names.sort_by_key(|name| Reverse(name.as_str()));
extern_crate_names.sort_by(|a, b| b.as_str().partial_cmp(a.as_str()).unwrap());
for name in extern_crate_names.into_iter() {
// Replace first ident with a crate name and check if that is valid.

View file

@ -231,7 +231,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let is_assoc_fn = self.self_type_is_available(span);
// Emit help message for fake-self from other languages (e.g., `this` in Javascript).
if ["this", "my"].contains(&&*item_str.as_str()) && is_assoc_fn {
if ["this", "my"].contains(&item_str.as_str()) && is_assoc_fn {
err.span_suggestion_short(
span,
"you might have meant to use `self` here instead",
@ -1358,7 +1358,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let name = path[path.len() - 1].ident.name;
// Make sure error reporting is deterministic.
names.sort_by_cached_key(|suggestion| suggestion.candidate.as_str());
names.sort_by(|a, b| a.candidate.as_str().partial_cmp(b.candidate.as_str()).unwrap());
match find_best_match_for_name(
&names.iter().map(|suggestion| suggestion.candidate).collect::<Vec<Symbol>>(),
@ -1377,7 +1377,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
fn likely_rust_type(path: &[Segment]) -> Option<Symbol> {
let name = path[path.len() - 1].ident.as_str();
// Common Java types
Some(match &*name {
Some(match name {
"byte" => sym::u8, // In Java, bytes are signed, but in practice one almost always wants unsigned bytes.
"short" => sym::i16,
"boolean" => sym::bool,
@ -2345,7 +2345,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
_ => None,
});
}
suggest_existing(err, &name.as_str()[..], suggs);
suggest_existing(err, name.as_str(), suggs);
}
[] => {
let mut suggs = Vec::new();

View file

@ -689,11 +689,11 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
hir_id: hir::HirId,
) {
let name = match fk {
intravisit::FnKind::ItemFn(id, _, _, _) => id.as_str(),
intravisit::FnKind::Method(id, _, _) => id.as_str(),
intravisit::FnKind::Closure => Symbol::intern("closure").as_str(),
intravisit::FnKind::ItemFn(id, _, _, _) => id.name,
intravisit::FnKind::Method(id, _, _) => id.name,
intravisit::FnKind::Closure => sym::closure,
};
let name: &str = &name;
let name = name.as_str();
let span = span!(Level::DEBUG, "visit_fn", name);
let _enter = span.enter();
match fk {

View file

@ -3481,7 +3481,7 @@ fn names_to_string(names: &[Symbol]) -> String {
if Ident::with_dummy_span(*name).is_raw_guess() {
result.push_str("r#");
}
result.push_str(&name.as_str());
result.push_str(name.as_str());
}
result
}

View file

@ -105,7 +105,7 @@ fn fast_print_path(path: &ast::Path) -> Symbol {
path_str.push_str("::");
}
if segment.ident.name != kw::PathRoot {
path_str.push_str(&segment.ident.as_str())
path_str.push_str(segment.ident.as_str())
}
}
Symbol::intern(&path_str)

View file

@ -825,7 +825,7 @@ impl<'tcx> SaveContext<'tcx> {
for attr in attrs {
if let Some(val) = attr.doc_str() {
// FIXME: Should save-analysis beautify doc strings itself or leave it to users?
result.push_str(&beautify_doc_string(val).as_str());
result.push_str(beautify_doc_string(val).as_str());
result.push('\n');
}
}

View file

@ -616,7 +616,7 @@ impl<'hir> Sig for hir::Generics<'hir> {
if let hir::GenericParamKind::Const { .. } = param.kind {
param_text.push_str("const ");
}
param_text.push_str(&param.name.ident().as_str());
param_text.push_str(param.name.ident().as_str());
defs.push(SigElement {
id: id_from_hir_id(param.hir_id, scx),
start: offset + text.len(),

View file

@ -60,7 +60,7 @@ pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input)
if let Some(ref s) = sess.opts.crate_name {
if let Some((attr, name)) = attr_crate_name {
if name.as_str() != *s {
if name.as_str() != s {
let msg = format!(
"`--crate-name` and `#[crate_name]` are \
required to match, but `{}` != `{}`",

View file

@ -55,7 +55,7 @@ pub fn find_best_match_for_name(
lookup: Symbol,
dist: Option<usize>,
) -> Option<Symbol> {
let lookup = &lookup.as_str();
let lookup = lookup.as_str();
let max_dist = dist.unwrap_or_else(|| cmp::max(lookup.len(), 3) / 3);
// Priority of matches:
@ -70,7 +70,7 @@ pub fn find_best_match_for_name(
let levenshtein_match = name_vec
.iter()
.filter_map(|&name| {
let dist = lev_distance(lookup, &name.as_str());
let dist = lev_distance(lookup, name.as_str());
if dist <= max_dist { Some((name, dist)) } else { None }
})
// Here we are collecting the next structure:
@ -88,7 +88,7 @@ pub fn find_best_match_for_name(
fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Symbol> {
iter_names.iter().fold(None, |result, candidate| {
if sort_by_words(&candidate.as_str()) == sort_by_words(lookup) {
if sort_by_words(candidate.as_str()) == sort_by_words(lookup) {
Some(*candidate)
} else {
result

View file

@ -1512,9 +1512,12 @@ impl Ident {
Ident::new(self.name, self.span.normalize_to_macro_rules())
}
/// Convert the name to a `SymbolStr`. This is a slowish operation because
/// it requires locking the symbol interner.
pub fn as_str(self) -> SymbolStr {
/// Access the underlying string. This is a slowish operation because it
/// requires locking the symbol interner.
///
/// Note that the lifetime of the return value is a lie. See
/// `Symbol::as_str()` for details.
pub fn as_str(&self) -> &str {
self.name.as_str()
}
}
@ -1650,12 +1653,17 @@ impl Symbol {
with_session_globals(|session_globals| session_globals.symbol_interner.intern(string))
}
/// Convert to a `SymbolStr`. This is a slowish operation because it
/// Access the underlying string. This is a slowish operation because it
/// requires locking the symbol interner.
pub fn as_str(self) -> SymbolStr {
with_session_globals(|session_globals| {
let symbol_str = session_globals.symbol_interner.get(self);
unsafe { SymbolStr { string: std::mem::transmute::<&str, &str>(symbol_str) } }
///
/// Note that the lifetime of the return value is a lie. It's not the same
/// as `&self`, but actually tied to the lifetime of the underlying
/// interner. Interners are long-lived, and there are very few of them, and
/// this function is typically used for short-lived things, so in practice
/// it works out ok.
pub fn as_str(&self) -> &str {
with_session_globals(|session_globals| unsafe {
std::mem::transmute::<&str, &str>(session_globals.symbol_interner.get(*self))
})
}
@ -1678,19 +1686,19 @@ impl Symbol {
impl fmt::Debug for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.as_str(), f)
fmt::Debug::fmt(self.as_str(), f)
}
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.as_str(), f)
fmt::Display::fmt(self.as_str(), f)
}
}
impl<S: Encoder> Encodable<S> for Symbol {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
s.emit_str(self.as_str())
}
}
@ -1709,11 +1717,10 @@ impl<CTX> HashStable<CTX> for Symbol {
}
impl<CTX> ToStableHashKey<CTX> for Symbol {
type KeyType = SymbolStr;
type KeyType = String;
#[inline]
fn to_stable_hash_key(&self, _: &CTX) -> SymbolStr {
self.as_str()
fn to_stable_hash_key(&self, _: &CTX) -> String {
self.as_str().to_string()
}
}
@ -1905,70 +1912,3 @@ impl Ident {
self.name.can_be_raw() && self.is_reserved()
}
}
/// An alternative to [`Symbol`], useful when the chars within the symbol need to
/// be accessed. It deliberately has limited functionality and should only be
/// used for temporary values.
///
/// Because the interner outlives any thread which uses this type, we can
/// safely treat `string` which points to interner data, as an immortal string,
/// as long as this type never crosses between threads.
//
// FIXME: ensure that the interner outlives any thread which uses `SymbolStr`,
// by creating a new thread right after constructing the interner.
#[derive(Clone, Eq, PartialOrd, Ord)]
pub struct SymbolStr {
string: &'static str,
}
// This impl allows a `SymbolStr` to be directly equated with a `String` or
// `&str`.
impl<T: std::ops::Deref<Target = str>> std::cmp::PartialEq<T> for SymbolStr {
fn eq(&self, other: &T) -> bool {
self.string == other.deref()
}
}
impl !Send for SymbolStr {}
impl !Sync for SymbolStr {}
/// This impl means that if `ss` is a `SymbolStr`:
/// - `*ss` is a `str`;
/// - `&*ss` is a `&str` (and `match &*ss { ... }` is a common pattern).
/// - `&ss as &str` is a `&str`, which means that `&ss` can be passed to a
/// function expecting a `&str`.
impl std::ops::Deref for SymbolStr {
type Target = str;
#[inline]
fn deref(&self) -> &str {
self.string
}
}
impl fmt::Debug for SymbolStr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(self.string, f)
}
}
impl fmt::Display for SymbolStr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self.string, f)
}
}
impl<CTX> HashStable<CTX> for SymbolStr {
#[inline]
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
self.string.hash_stable(hcx, hasher)
}
}
impl<CTX> ToStableHashKey<CTX> for SymbolStr {
type KeyType = SymbolStr;
#[inline]
fn to_stable_hash_key(&self, _: &CTX) -> SymbolStr {
self.clone()
}
}
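
As the new doc comment above says, the `&str` returned by `Symbol::as_str` is really tied to the interner's lifetime rather than to `&self`. A simplified, safe model of why that works out in practice is sketched below: an interner that never frees its strings can hand out effectively immortal `&str`s (here via `Box::leak`; the real implementation uses an arena plus the `transmute` shown above, and the types here are illustrative, not rustc's):

```rust
use std::collections::HashMap;

// Toy index-based symbol, standing in for rustc's `Symbol`.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct Symbol(u32);

#[derive(Default)]
struct Interner {
    strings: Vec<&'static str>,
    names: HashMap<&'static str, Symbol>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> Symbol {
        if let Some(&sym) = self.names.get(s) {
            return sym;
        }
        // Leak the allocation so the string outlives every borrower,
        // mimicking interner storage that is never deallocated.
        let s: &'static str = Box::leak(s.to_owned().into_boxed_str());
        let sym = Symbol(self.strings.len() as u32);
        self.strings.push(s);
        self.names.insert(s, sym);
        sym
    }

    // The returned `&str` is nominally tied to `&self`, but the data it
    // points to lives as long as the interner's storage (here: forever).
    fn get(&self, sym: Symbol) -> &str {
        self.strings[sym.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let sym = interner.intern("hello");
    assert_eq!(interner.get(sym), "hello");
}
```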

View file

@ -255,7 +255,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolPrinter<'tcx> {
}
fn path_crate(self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
self.write_str(&self.tcx.crate_name(cnum).as_str())?;
self.write_str(self.tcx.crate_name(cnum).as_str())?;
Ok(self)
}
fn path_qualified(

View file

@ -9,6 +9,7 @@ use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::print::{Print, Printer};
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst};
use rustc_middle::ty::{self, FloatTy, Instance, IntTy, Ty, TyCtxt, TypeFoldable, UintTy};
use rustc_span::symbol::kw;
use rustc_target::abi::call::FnAbi;
use rustc_target::abi::Integer;
use rustc_target::spec::abi::Abi;
@ -559,7 +560,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
ty::ExistentialPredicate::Projection(projection) => {
let name = cx.tcx.associated_item(projection.item_def_id).ident;
cx.push("p");
cx.push_ident(&name.as_str());
cx.push_ident(name.as_str());
cx = projection.ty.print(cx)?;
}
ty::ExistentialPredicate::AutoTrait(def_id) => {
@ -702,12 +703,11 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
// just to be able to handle disambiguators.
let disambiguated_field =
self.tcx.def_key(field_def.did).disambiguated_data;
let field_name =
disambiguated_field.data.get_opt_name().map(|s| s.as_str());
let field_name = disambiguated_field.data.get_opt_name();
self.push_disambiguator(
disambiguated_field.disambiguator as u64,
);
self.push_ident(&field_name.as_ref().map_or("", |s| &s[..]));
self.push_ident(field_name.unwrap_or(kw::Empty).as_str());
self = field.print(self)?;
}
@ -736,8 +736,8 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
self.push("C");
let stable_crate_id = self.tcx.def_path_hash(cnum.as_def_id()).stable_crate_id();
self.push_disambiguator(stable_crate_id.to_u64());
let name = self.tcx.crate_name(cnum).as_str();
self.push_ident(&name);
let name = self.tcx.crate_name(cnum);
self.push_ident(name.as_str());
Ok(self)
}
@ -793,13 +793,13 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
}
};
let name = disambiguated_data.data.get_opt_name().map(|s| s.as_str());
let name = disambiguated_data.data.get_opt_name();
self.path_append_ns(
print_prefix,
ns,
disambiguated_data.disambiguator as u64,
name.as_ref().map_or("", |s| &s[..]),
name.unwrap_or(kw::Empty).as_str(),
)
}

View file

@ -298,43 +298,43 @@ impl InlineAsmReg {
let name = name.as_str();
Ok(match arch {
InlineAsmArch::X86 | InlineAsmArch::X86_64 => {
Self::X86(X86InlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::X86(X86InlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Arm => {
Self::Arm(ArmInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Arm(ArmInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::AArch64 => {
Self::AArch64(AArch64InlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::AArch64(AArch64InlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => {
Self::RiscV(RiscVInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::RiscV(RiscVInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Nvptx64 => {
Self::Nvptx(NvptxInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Nvptx(NvptxInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::PowerPC | InlineAsmArch::PowerPC64 => {
Self::PowerPC(PowerPCInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::PowerPC(PowerPCInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Hexagon => {
Self::Hexagon(HexagonInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Hexagon(HexagonInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Mips | InlineAsmArch::Mips64 => {
Self::Mips(MipsInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Mips(MipsInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::S390x => {
Self::S390x(S390xInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::S390x(S390xInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::SpirV => {
Self::SpirV(SpirVInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::SpirV(SpirVInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Wasm32 | InlineAsmArch::Wasm64 => {
Self::Wasm(WasmInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Wasm(WasmInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Bpf => {
Self::Bpf(BpfInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Bpf(BpfInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Avr => {
Self::Avr(AvrInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Avr(AvrInlineAsmReg::parse(arch, has_feature, target, name)?)
}
})
}
@ -798,7 +798,7 @@ impl InlineAsmClobberAbi {
target: &Target,
name: Symbol,
) -> Result<Self, &'static [&'static str]> {
let name = &*name.as_str();
let name = name.as_str();
match arch {
InlineAsmArch::X86 => match name {
"C" | "system" | "efiapi" | "cdecl" | "stdcall" | "fastcall" => {

View file

@ -269,7 +269,7 @@ impl<'tcx> OnUnimplementedFormatString {
let name = tcx.item_name(trait_def_id);
let generics = tcx.generics_of(trait_def_id);
let s = self.0.as_str();
let parser = Parser::new(&s, None, None, false, ParseMode::Format);
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut result = Ok(());
for token in parser {
match token {
@ -347,7 +347,7 @@ impl<'tcx> OnUnimplementedFormatString {
let empty_string = String::new();
let s = self.0.as_str();
let parser = Parser::new(&s, None, None, false, ParseMode::Format);
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
parser
.map(|p| match p {

View file

@ -609,7 +609,7 @@ fn check_must_not_suspend_def(
// Add optional reason note
if let Some(note) = attr.value_str() {
// FIXME(guswynn): consider formatting this better
err.span_note(data.source_span, &note.as_str());
err.span_note(data.source_span, note.as_str());
}
// Add some quick suggestions on what to do

View file

@ -1038,7 +1038,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
.collect();
// Sort them by the name so we have a stable result.
names.sort_by_cached_key(|n| n.as_str());
names.sort_by(|a, b| a.as_str().partial_cmp(b.as_str()).unwrap());
names
}
@ -1908,7 +1908,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
.associated_items(def_id)
.in_definition_order()
.filter(|x| {
let dist = lev_distance(&*name.as_str(), &x.ident.as_str());
let dist = lev_distance(name.as_str(), x.ident.as_str());
x.kind.namespace() == Namespace::ValueNS && dist > 0 && dist <= max_dist
})
.copied()

View file

@ -2849,7 +2849,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
);
} else if attr.has_name(sym::linkage) {
if let Some(val) = attr.value_str() {
codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, &val.as_str()));
codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, val.as_str()));
}
} else if attr.has_name(sym::link_section) {
if let Some(val) = attr.value_str() {