
Auto merge of #103978 - matthiaskrgr:rollup-iym9kmg, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - #103367 (Remove std's transitive dependency on cfg-if 0.1)
 - #103397 (Port `dead_code` lints to be translatable.)
 - #103681 (libtest: run all tests in their own thread, if supported by the host)
 - #103792 (Migrate `codegen_ssa` to diagnostics structs - [Part 2])
 - #103897 (asm: Work around LLVM bug on AArch64)
 - #103937 (minor changes to make method lookup diagnostic code easier to read)
 - #103958 (Test tidy should not count untracked paths towards entries limit)
 - #103964 (Give a specific lint for unsafety not being inherited)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2022-11-04 19:43:36 +00:00
commit 09508489ef
39 changed files with 811 additions and 407 deletions


@ -1526,11 +1526,11 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.1.14"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
@ -2478,7 +2478,7 @@ name = "panic_abort"
version = "0.0.0"
dependencies = [
"alloc",
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"compiler_builtins",
"core",
"libc",
@ -2489,7 +2489,7 @@ name = "panic_unwind"
version = "0.0.0"
dependencies = [
"alloc",
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"compiler_builtins",
"core",
"libc",
@ -2817,7 +2817,7 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.14",
"getrandom 0.1.16",
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
@ -2861,7 +2861,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.14",
"getrandom 0.1.16",
]
[[package]]
@ -4937,6 +4937,7 @@ name = "tidy"
version = "0.1.0"
dependencies = [
"cargo_metadata 0.14.0",
"ignore",
"lazy_static",
"miropt-test-tools",
"regex",
@ -5357,7 +5358,7 @@ name = "unwind"
version = "0.0.0"
dependencies = [
"cc",
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"compiler_builtins",
"core",
"libc",


@ -505,6 +505,44 @@ fn xmm_reg_index(reg: InlineAsmReg) -> Option<u32> {
}
}
/// If the register is an AArch64 integer register then return its index.
fn a64_reg_index(reg: InlineAsmReg) -> Option<u32> {
match reg {
InlineAsmReg::AArch64(AArch64InlineAsmReg::x0) => Some(0),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x1) => Some(1),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x2) => Some(2),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x3) => Some(3),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x4) => Some(4),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x5) => Some(5),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x6) => Some(6),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x7) => Some(7),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x8) => Some(8),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x9) => Some(9),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x10) => Some(10),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x11) => Some(11),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x12) => Some(12),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x13) => Some(13),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x14) => Some(14),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x15) => Some(15),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x16) => Some(16),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x17) => Some(17),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x18) => Some(18),
// x19 is reserved
InlineAsmReg::AArch64(AArch64InlineAsmReg::x20) => Some(20),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x21) => Some(21),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x22) => Some(22),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x23) => Some(23),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x24) => Some(24),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x25) => Some(25),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x26) => Some(26),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x27) => Some(27),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x28) => Some(28),
// x29 is reserved
InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) => Some(30),
_ => None,
}
}
/// If the register is an AArch64 vector register then return its index.
fn a64_vreg_index(reg: InlineAsmReg) -> Option<u32> {
match reg {
@ -535,6 +573,22 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
'x'
};
format!("{{{}mm{}}}", class, idx)
} else if let Some(idx) = a64_reg_index(reg) {
let class = if let Some(layout) = layout {
match layout.size.bytes() {
8 => 'x',
_ => 'w',
}
} else {
// We use i32 as the type for discarded outputs
'w'
};
if class == 'x' && reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
// LLVM doesn't recognize x30. use lr instead.
"{lr}".to_string()
} else {
format!("{{{}{}}}", class, idx)
}
} else if let Some(idx) = a64_vreg_index(reg) {
let class = if let Some(layout) = layout {
match layout.size.bytes() {
@ -550,9 +604,6 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
'q'
};
format!("{{{}{}}}", class, idx)
} else if reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
// LLVM doesn't recognize x30
"{lr}".to_string()
} else if reg == InlineAsmReg::Arm(ArmInlineAsmReg::r14) {
// LLVM doesn't recognize r14
"{lr}".to_string()


@ -6,11 +6,12 @@ use rustc_span::symbol::Symbol;
use object::read::archive::ArchiveFile;
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};
use crate::errors::ExtractBundledLibsError;
pub trait ArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;
@ -28,32 +29,35 @@ pub trait ArchiveBuilderBuilder {
is_direct_dependency: bool,
) -> PathBuf;
fn extract_bundled_libs(
&self,
rlib: &Path,
fn extract_bundled_libs<'a>(
&'a self,
rlib: &'a Path,
outdir: &Path,
bundled_lib_file_names: &FxHashSet<Symbol>,
) -> Result<(), String> {
let message = |msg: &str, e: &dyn Display| format!("{} '{}': {}", msg, &rlib.display(), e);
) -> Result<(), ExtractBundledLibsError<'_>> {
let archive_map = unsafe {
Mmap::map(File::open(rlib).map_err(|e| message("failed to open file", &e))?)
.map_err(|e| message("failed to mmap file", &e))?
Mmap::map(
File::open(rlib)
.map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
)
.map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
};
let archive = ArchiveFile::parse(&*archive_map)
.map_err(|e| message("failed to parse archive", &e))?;
.map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;
for entry in archive.members() {
let entry = entry.map_err(|e| message("failed to read entry", &e))?;
let entry = entry
.map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
let data = entry
.data(&*archive_map)
.map_err(|e| message("failed to get data from archive member", &e))?;
.map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
let name = std::str::from_utf8(entry.name())
.map_err(|e| message("failed to convert name", &e))?;
.map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
continue; // We need to extract only native libraries.
}
std::fs::write(&outdir.join(&name), data)
.map_err(|e| message("failed to write file", &e))?;
.map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
}
Ok(())
}
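The change above replaces ad-hoc `format!` strings with a structured `ExtractBundledLibsError` returned to the caller. Below is a standalone sketch of the same map_err-into-enum shape, using only the standard library since the real type goes through `#[derive(Diagnostic)]` and Fluent; the names here are illustrative, not rustc's.

```rust
// Standalone sketch of the pattern above, assuming plain std types instead of
// rustc's Diagnostic machinery: each failure site wraps the io error together
// with the path it was operating on, and formatting is deferred to one place.
use std::fmt;
use std::fs::File;
use std::path::Path;

#[derive(Debug)]
enum ExtractError<'a> {
    OpenFile { rlib: &'a Path, error: std::io::Error },
}

impl fmt::Display for ExtractError<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ExtractError::OpenFile { rlib, error } => {
                write!(f, "failed to open file '{}': {}", rlib.display(), error)
            }
        }
    }
}

fn open_rlib(rlib: &Path) -> Result<File, ExtractError<'_>> {
    // The error carries the rlib path, so the message is assembled later.
    File::open(rlib).map_err(|error| ExtractError::OpenFile { rlib, error })
}

fn main() {
    if let Err(e) = open_rlib(Path::new("libdoes_not_exist.rlib")) {
        eprintln!("{e}");
    }
}
```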


@ -919,29 +919,17 @@ fn link_natively<'a>(
)
.is_some();
sess.note_without_error("`link.exe` returned an unexpected error");
sess.emit_note(errors::LinkExeUnexpectedError);
if is_vs_installed && has_linker {
// the linker is broken
sess.note_without_error(
"the Visual Studio build tools may need to be repaired \
using the Visual Studio installer",
);
sess.note_without_error(
"or a necessary component may be missing from the \
\"C++ build tools\" workload",
);
sess.emit_note(errors::RepairVSBuildTools);
sess.emit_note(errors::MissingCppBuildToolComponent);
} else if is_vs_installed {
// the linker is not installed
sess.note_without_error(
"in the Visual Studio installer, ensure the \
\"C++ build tools\" workload is selected",
);
sess.emit_note(errors::SelectCppBuildToolWorkload);
} else {
// visual studio is not installed
sess.note_without_error(
"you may need to install Visual Studio build tools with the \
\"C++ build tools\" workload",
);
sess.emit_note(errors::VisualStudioNotInstalled);
}
}
}
@ -954,35 +942,20 @@ fn link_natively<'a>(
Err(e) => {
let linker_not_found = e.kind() == io::ErrorKind::NotFound;
let mut linker_error = {
if linker_not_found {
sess.struct_err(&format!("linker `{}` not found", linker_path.display()))
} else {
sess.struct_err(&format!(
"could not exec the linker `{}`",
linker_path.display()
))
}
};
linker_error.note(&e.to_string());
if !linker_not_found {
linker_error.note(&format!("{:?}", &cmd));
if linker_not_found {
sess.emit_err(errors::LinkerNotFound { linker_path, error: e });
} else {
sess.emit_err(errors::UnableToExeLinker {
linker_path,
error: e,
command_formatted: format!("{:?}", &cmd),
});
}
linker_error.emit();
if sess.target.is_like_msvc && linker_not_found {
sess.note_without_error(
"the msvc targets depend on the msvc linker \
but `link.exe` was not found",
);
sess.note_without_error(
"please ensure that Visual Studio 2017 or later, or Build Tools \
for Visual Studio were installed with the Visual C++ option.",
);
sess.note_without_error("VS Code is a different product, and is not sufficient.");
sess.emit_note(errors::MsvcMissingLinker);
sess.emit_note(errors::CheckInstalledVisualStudio);
sess.emit_note(errors::UnsufficientVSCodeProduct);
}
sess.abort_if_errors();
}
@ -1007,15 +980,13 @@ fn link_natively<'a>(
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
sess.struct_warn(&format!(
"processing debug info with `dsymutil` failed: {}",
prog.status
))
.note(&escape_string(&output))
.emit();
sess.emit_warning(errors::ProcessingDymutilFailed {
status: prog.status,
output: escape_string(&output),
});
}
}
Err(e) => sess.fatal(&format!("unable to run `dsymutil`: {}", e)),
Err(error) => sess.emit_fatal(errors::UnableToRunDsymutil { error }),
}
}
@ -1092,15 +1063,14 @@ fn strip_symbols_with_external_utility<'a>(
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
sess.struct_warn(&format!(
"stripping debug info with `{}` failed: {}",
util, prog.status
))
.note(&escape_string(&output))
.emit();
sess.emit_warning(errors::StrippingDebugInfoFailed {
util,
status: prog.status,
output: escape_string(&output),
});
}
}
Err(e) => sess.fatal(&format!("unable to run `{}`: {}", util, e)),
Err(error) => sess.emit_fatal(errors::UnableToRun { util, error }),
}
}
@ -1251,7 +1221,7 @@ pub fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
)),
(Some(linker), None) => {
let stem = linker.file_stem().and_then(|stem| stem.to_str()).unwrap_or_else(|| {
sess.fatal("couldn't extract file stem from specified linker")
sess.emit_fatal(errors::LinkerFileStem);
});
let flavor = if stem == "emcc" {
@ -1378,13 +1348,9 @@ fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLib]) {
})
.collect();
if !lib_args.is_empty() {
sess.note_without_error(
"Link against the following native artifacts when linking \
against this static library. The order and any duplication \
can be significant on some platforms.",
);
sess.emit_note(errors::StaticLibraryNativeArtifacts);
// Prefix for greppability
sess.note_without_error(&format!("native-static-libs: {}", &lib_args.join(" ")));
sess.emit_note(errors::NativeStaticLibs { arguments: lib_args.join(" ") });
}
}
@ -1688,14 +1654,14 @@ fn add_link_script(cmd: &mut dyn Linker, sess: &Session, tmpdir: &Path, crate_ty
match (crate_type, &sess.target.link_script) {
(CrateType::Cdylib | CrateType::Executable, Some(script)) => {
if !sess.target.linker_flavor.is_gnu() {
sess.fatal("can only use link script when linking with GNU-like linker");
sess.emit_fatal(errors::LinkScriptUnavailable);
}
let file_name = ["rustc", &sess.target.llvm_target, "linkfile.ld"].join("-");
let path = tmpdir.join(file_name);
if let Err(e) = fs::write(&path, script.as_ref()) {
sess.fatal(&format!("failed to write link script to {}: {}", path.display(), e));
if let Err(error) = fs::write(&path, script.as_ref()) {
sess.emit_fatal(errors::LinkScriptWriteFailure { path, error });
}
cmd.arg("--script");
@ -1841,8 +1807,8 @@ fn add_linked_symbol_object(
let path = tmpdir.join("symbols.o");
let result = std::fs::write(&path, file.write().unwrap());
if let Err(e) = result {
sess.fatal(&format!("failed to write {}: {}", path.display(), e));
if let Err(error) = result {
sess.emit_fatal(errors::FailedToWrite { path, error });
}
cmd.add_object(&path);
}
@ -2299,14 +2265,10 @@ fn collect_natvis_visualizers(
visualizer_paths.push(visualizer_out_file);
}
Err(error) => {
sess.warn(
format!(
"Unable to write debugger visualizer file `{}`: {} ",
visualizer_out_file.display(),
error
)
.as_str(),
);
sess.emit_warning(errors::UnableToWriteDebuggerVisualizer {
path: visualizer_out_file,
error,
});
}
};
}
@ -2484,7 +2446,7 @@ fn add_upstream_rust_crates<'a>(
let rlib = &src.rlib.as_ref().unwrap().0;
archive_builder_builder
.extract_bundled_libs(rlib, tmpdir, &bundled_libs)
.unwrap_or_else(|e| sess.fatal(e));
.unwrap_or_else(|e| sess.emit_fatal(e));
}
let mut last = (None, NativeLibKind::Unspecified, None);
@ -2641,7 +2603,7 @@ fn add_upstream_rust_crates<'a>(
|| !codegen_results.crate_info.is_no_builtins.contains(&cnum);
let mut archive = archive_builder_builder.new_archive_builder(sess);
if let Err(e) = archive.add_archive(
if let Err(error) = archive.add_archive(
cratepath,
Box::new(move |f| {
if f == METADATA_FILENAME {
@ -2681,7 +2643,7 @@ fn add_upstream_rust_crates<'a>(
false
}),
) {
sess.fatal(&format!("failed to build archive from rlib: {}", e));
sess.emit_fatal(errors::RlibArchiveBuildFailure { error });
}
if archive.build(&dst) {
link_upstream(&dst);
@ -2813,14 +2775,14 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
("arm", "watchos") => "watchos",
(_, "macos") => "macosx",
_ => {
sess.err(&format!("unsupported arch `{}` for os `{}`", arch, os));
sess.emit_err(errors::UnsupportedArch { arch, os });
return;
}
};
let sdk_root = match get_apple_sdk_root(sdk_name) {
Ok(s) => s,
Err(e) => {
sess.err(&e);
sess.emit_err(e);
return;
}
};
@ -2836,7 +2798,7 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
}
}
fn get_apple_sdk_root(sdk_name: &str) -> Result<String, String> {
fn get_apple_sdk_root(sdk_name: &str) -> Result<String, errors::AppleSdkRootError<'_>> {
// Following what clang does
// (https://github.com/llvm/llvm-project/blob/
// 296a80102a9b72c3eda80558fb78a3ed8849b341/clang/lib/Driver/ToolChains/Darwin.cpp#L1661-L1678)
@ -2886,7 +2848,7 @@ fn get_apple_sdk_root(sdk_name: &str) -> Result<String, String> {
match res {
Ok(output) => Ok(output.trim().to_string()),
Err(e) => Err(format!("failed to get {} SDK path: {}", sdk_name, e)),
Err(error) => Err(errors::AppleSdkRootError::SdkPath { sdk_name, error }),
}
}
@ -2919,7 +2881,7 @@ fn add_gcc_ld_path(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
}
}
} else {
sess.fatal("option `-Z gcc-ld` is used even though linker flavor is not gcc");
sess.emit_fatal(errors::OptionGccOnly);
}
}
}


@ -354,3 +354,170 @@ impl IntoDiagnostic<'_> for LinkingFailed<'_> {
diag
}
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_link_exe_unexpected_error)]
pub struct LinkExeUnexpectedError;
#[derive(Diagnostic)]
#[diag(codegen_ssa_repair_vs_build_tools)]
pub struct RepairVSBuildTools;
#[derive(Diagnostic)]
#[diag(codegen_ssa_missing_cpp_build_tool_component)]
pub struct MissingCppBuildToolComponent;
#[derive(Diagnostic)]
#[diag(codegen_ssa_select_cpp_build_tool_workload)]
pub struct SelectCppBuildToolWorkload;
#[derive(Diagnostic)]
#[diag(codegen_ssa_visual_studio_not_installed)]
pub struct VisualStudioNotInstalled;
#[derive(Diagnostic)]
#[diag(codegen_ssa_linker_not_found)]
#[note]
pub struct LinkerNotFound {
pub linker_path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_exe_linker)]
#[note]
#[note(command_note)]
pub struct UnableToExeLinker {
pub linker_path: PathBuf,
pub error: Error,
pub command_formatted: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_msvc_missing_linker)]
pub struct MsvcMissingLinker;
#[derive(Diagnostic)]
#[diag(codegen_ssa_check_installed_visual_studio)]
pub struct CheckInstalledVisualStudio;
#[derive(Diagnostic)]
#[diag(codegen_ssa_unsufficient_vs_code_product)]
pub struct UnsufficientVSCodeProduct;
#[derive(Diagnostic)]
#[diag(codegen_ssa_processing_dymutil_failed)]
#[note]
pub struct ProcessingDymutilFailed {
pub status: ExitStatus,
pub output: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_run_dsymutil)]
#[note]
pub struct UnableToRunDsymutil {
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_stripping_debu_info_failed)]
#[note]
pub struct StrippingDebugInfoFailed<'a> {
pub util: &'a str,
pub status: ExitStatus,
pub output: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_run)]
pub struct UnableToRun<'a> {
pub util: &'a str,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_linker_file_stem)]
pub struct LinkerFileStem;
#[derive(Diagnostic)]
#[diag(codegen_ssa_static_library_native_artifacts)]
pub struct StaticLibraryNativeArtifacts;
#[derive(Diagnostic)]
#[diag(codegen_ssa_native_static_libs)]
pub struct NativeStaticLibs {
pub arguments: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_link_script_unavailable)]
pub struct LinkScriptUnavailable;
#[derive(Diagnostic)]
#[diag(codegen_ssa_link_script_write_failure)]
pub struct LinkScriptWriteFailure {
pub path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_failed_to_write)]
pub struct FailedToWrite {
pub path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_write_debugger_visualizer)]
pub struct UnableToWriteDebuggerVisualizer {
pub path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_rlib_archive_build_failure)]
pub struct RlibArchiveBuildFailure {
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_option_gcc_only)]
pub struct OptionGccOnly;
#[derive(Diagnostic)]
pub enum ExtractBundledLibsError<'a> {
#[diag(codegen_ssa_extract_bundled_libs_open_file)]
OpenFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_mmap_file)]
MmapFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_parse_archive)]
ParseArchive { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_read_entry)]
ReadEntry { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_archive_member)]
ArchiveMember { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_convert_name)]
ConvertName { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_write_file)]
WriteFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unsupported_arch)]
pub struct UnsupportedArch<'a> {
pub arch: &'a str,
pub os: &'a str,
}
#[derive(Diagnostic)]
pub enum AppleSdkRootError<'a> {
#[diag(codegen_ssa_apple_sdk_error_sdk_path)]
SdkPath { sdk_name: &'a str, error: Error },
}


@ -119,3 +119,66 @@ codegen_ssa_thorin_object_read = {$error}
codegen_ssa_thorin_object_write = {$error}
codegen_ssa_thorin_gimli_read = {$error}
codegen_ssa_thorin_gimli_write = {$error}
codegen_ssa_link_exe_unexpected_error = `link.exe` returned an unexpected error
codegen_ssa_repair_vs_build_tools = the Visual Studio build tools may need to be repaired using the Visual Studio installer
codegen_ssa_missing_cpp_build_tool_component = or a necessary component may be missing from the "C++ build tools" workload
codegen_ssa_select_cpp_build_tool_workload = in the Visual Studio installer, ensure the "C++ build tools" workload is selected
codegen_ssa_visual_studio_not_installed = you may need to install Visual Studio build tools with the "C++ build tools" workload
codegen_ssa_linker_not_found = linker `{$linker_path}` not found
.note = {$error}
codegen_ssa_unable_to_exe_linker = could not exec the linker `{$linker_path}`
.note = {$error}
.command_note = {$command_formatted}
codegen_ssa_msvc_missing_linker = the msvc targets depend on the msvc linker but `link.exe` was not found
codegen_ssa_check_installed_visual_studio = please ensure that Visual Studio 2017 or later, or Build Tools for Visual Studio were installed with the Visual C++ option.
codegen_ssa_unsufficient_vs_code_product = VS Code is a different product, and is not sufficient.
codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` failed: {$status}
.note = {$output}
codegen_ssa_unable_to_run_dsymutil = unable to run `dsymutil`: {$error}
codegen_ssa_stripping_debu_info_failed = stripping debug info with `{$util}` failed: {$status}
.note = {$output}
codegen_ssa_unable_to_run = unable to run `{$util}`: {$error}
codegen_ssa_linker_file_stem = couldn't extract file stem from specified linker
codegen_ssa_static_library_native_artifacts = Link against the following native artifacts when linking against this static library. The order and any duplication can be significant on some platforms.
codegen_ssa_native_static_libs = native-static-libs: {$arguments}
codegen_ssa_link_script_unavailable = can only use link script when linking with GNU-like linker
codegen_ssa_link_script_write_failure = failed to write link script to {$path}: {$error}
codegen_ssa_failed_to_write = failed to write {$path}: {$error}
codegen_ssa_unable_to_write_debugger_visualizer = Unable to write debugger visualizer file `{$path}`: {$error}
codegen_ssa_rlib_archive_build_failure = failed to build archive from rlib: {$error}
codegen_ssa_option_gcc_only = option `-Z gcc-ld` is used even though linker flavor is not gcc
codegen_ssa_extract_bundled_libs_open_file = failed to open file '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_mmap_file = failed to mmap file '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_parse_archive = failed to parse archive '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_read_entry = failed to read entry '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_archive_member = failed to get data from archive member '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_convert_name = failed to convert name '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_write_file = failed to write file '{$rlib}': {$error}
codegen_ssa_unsupported_arch = unsupported arch `{$arch}` for os `{$os}`
codegen_ssa_apple_sdk_error_sdk_path = failed to get {$sdk_name} SDK path: {$error}
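Roughly how one of these messages is filled in from its diagnostic struct, shown as plain string formatting rather than the real Fluent resolver (illustration only):

```rust
// Illustration only: plain string formatting standing in for the Fluent resolver,
// showing how the fields of `LinkerNotFound` above fill the message and its note.
fn render_linker_not_found(linker_path: &str, error: &str) -> (String, String) {
    let message = format!("linker `{linker_path}` not found"); // codegen_ssa_linker_not_found
    let note = error.to_string(); // .note = {$error}
    (message, note)
}

fn main() {
    let (msg, note) = render_linker_not_found("link.exe", "program not found");
    assert_eq!(msg, "linker `link.exe` not found");
    assert_eq!(note, "program not found");
}
```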


@ -665,3 +665,36 @@ passes_missing_const_err =
attributes `#[rustc_const_unstable]` and `#[rustc_const_stable]` require the function or method to be `const`
.help = make the function or method const
.label = attribute specified here
passes_dead_codes =
{ $multiple ->
*[true] multiple {$descr}s are
[false] { $num ->
[one] {$descr} {$name_list} is
*[other] {$descr}s {$name_list} are
}
} never {$participle}
passes_change_fields_to_be_of_unit_type =
consider changing the { $num ->
[one] field
*[other] fields
} to be of unit type to suppress this warning while preserving the field numbering, or remove the { $num ->
[one] field
*[other] fields
}
passes_parent_info =
{$num ->
[one] {$descr}
*[other] {$descr}s
} in this {$parent_descr}
passes_ignored_derived_impls =
`{$name}` has {$trait_list_len ->
[one] a derived impl
*[other] derived impls
} for the {$trait_list_len ->
[one] trait {$trait_list}, but this is
*[other] traits {$trait_list}, but these are
} intentionally ignored during dead code analysis
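The `passes_dead_codes` selectors above can be hard to read; this plain-Rust equivalent (not the Fluent runtime) shows what they evaluate to for a given set of arguments, with names mirroring the Fluent variables:

```rust
// Not the Fluent runtime: a hand-written equivalent of the selector logic above.
fn dead_codes_message(multiple: bool, num: usize, descr: &str, name_list: &str, participle: &str) -> String {
    let subject = if multiple {
        // In the diff, `multiple` is set when more than six items are reported,
        // in which case the name list is omitted entirely.
        format!("multiple {descr}s are")
    } else if num == 1 {
        format!("{descr} {name_list} is")
    } else {
        format!("{descr}s {name_list} are")
    };
    format!("{subject} never {participle}")
}

fn main() {
    assert_eq!(
        dead_codes_message(false, 3, "field", "`d`, `f`, and `g`", "read"),
        "fields `d`, `f`, and `g` are never read"
    );
}
```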


@ -11,8 +11,10 @@ use rustc_target::abi::TargetDataLayoutErrors;
use rustc_target::spec::{PanicStrategy, SplitDebuginfo, StackProtector, TargetTriple};
use std::borrow::Cow;
use std::fmt;
use std::fmt::Write;
use std::num::ParseIntError;
use std::path::{Path, PathBuf};
use std::process::ExitStatus;
pub struct DiagnosticArgFromDisplay<'a>(pub &'a dyn fmt::Display);
@ -58,6 +60,7 @@ into_diagnostic_arg_using_display!(
i128,
u128,
std::io::Error,
std::boxed::Box<dyn std::error::Error>,
std::num::NonZeroU32,
hir::Target,
Edition,
@ -66,7 +69,8 @@ into_diagnostic_arg_using_display!(
ParseIntError,
StackProtector,
&TargetTriple,
SplitDebuginfo
SplitDebuginfo,
ExitStatus,
);
impl IntoDiagnosticArg for bool {
@ -170,6 +174,37 @@ impl IntoDiagnosticArg for Level {
}
}
#[derive(Clone)]
pub struct DiagnosticSymbolList(Vec<Symbol>);
impl From<Vec<Symbol>> for DiagnosticSymbolList {
fn from(v: Vec<Symbol>) -> Self {
DiagnosticSymbolList(v)
}
}
impl IntoDiagnosticArg for DiagnosticSymbolList {
fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
// FIXME: replace the logic here with a real list formatter
let symbols = match &self.0[..] {
[symbol] => format!("`{symbol}`"),
[symbol, last] => {
format!("`{symbol}` and `{last}`",)
}
[symbols @ .., last] => {
let mut result = String::new();
for symbol in symbols {
write!(result, "`{symbol}`, ").unwrap();
}
write!(result, "and `{last}`").unwrap();
result
}
[] => unreachable!(),
};
DiagnosticArgValue::Str(Cow::Owned(symbols))
}
}
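A standalone version of the interim list formatter above, with plain strings instead of `Symbol` so it compiles on its own; the serial comma for three or more items is what the dead-code UI test expectations later in this commit are updated to:

```rust
// Standalone sketch of `DiagnosticSymbolList`'s interim list formatting above,
// using plain strings instead of `Symbol`.
use std::fmt::Write;

fn format_symbol_list(items: &[&str]) -> String {
    match items {
        [only] => format!("`{only}`"),
        [first, last] => format!("`{first}` and `{last}`"),
        [rest @ .., last] => {
            let mut out = String::new();
            for item in rest {
                write!(out, "`{item}`, ").unwrap();
            }
            write!(out, "and `{last}`").unwrap();
            out
        }
        [] => unreachable!("callers never pass an empty list"),
    }
}

fn main() {
    // Three or more items get a serial comma, hence the expectation changes from
    // "`a`, `b` and `c`" to "`a`, `b`, and `c`" in the UI tests below.
    assert_eq!(
        format_symbol_list(&["field1", "field2", "field3", "field4"]),
        "`field1`, `field2`, `field3`, and `field4`"
    );
}
```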
impl IntoDiagnostic<'_, !> for TargetDataLayoutErrors<'_> {
fn into_diagnostic(self, handler: &Handler) -> DiagnosticBuilder<'_, !> {
let mut diag;


@ -376,7 +376,7 @@ pub use diagnostic::{
DiagnosticStyledString, IntoDiagnosticArg, SubDiagnostic,
};
pub use diagnostic_builder::{DiagnosticBuilder, EmissionGuarantee, Noted};
pub use diagnostic_impls::DiagnosticArgFromDisplay;
pub use diagnostic_impls::{DiagnosticArgFromDisplay, DiagnosticSymbolList};
use std::backtrace::Backtrace;
/// A handler deals with errors and other compiler output.


@ -55,8 +55,7 @@ pub enum MethodError<'tcx> {
// not-in-scope traits which may work.
PrivateMatch(DefKind, DefId, Vec<DefId>),
// Found a `Self: Sized` bound where `Self` is a trait object, also the caller may have
// forgotten to import a trait.
// Found a `Self: Sized` bound where `Self` is a trait object.
IllegalSizedBound(Vec<DefId>, bool, Span),
// Found a match, but the return type is wrong


@ -1019,7 +1019,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let out_of_scope_traits = match self.pick_core() {
Some(Ok(p)) => vec![p.item.container_id(self.tcx)],
//Some(Ok(p)) => p.iter().map(|p| p.item.container().id()).collect(),
Some(Err(MethodError::Ambiguity(v))) => v
.into_iter()
.map(|source| match source {


@ -248,7 +248,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match error {
MethodError::NoMatch(NoMatchData {
static_candidates: mut static_sources,
mut static_candidates,
unsatisfied_predicates,
out_of_scope_traits,
lev_candidate,
@ -288,9 +288,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if generics.len() > 0 {
let mut autoderef = self.autoderef(span, actual);
let candidate_found = autoderef.any(|(ty, _)| {
if let ty::Adt(adt_deref, _) = ty.kind() {
if let ty::Adt(adt_def, _) = ty.kind() {
self.tcx
.inherent_impls(adt_deref.did())
.inherent_impls(adt_def.did())
.iter()
.filter_map(|def_id| self.associated_value(*def_id, item_name))
.count()
@ -348,15 +348,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
let ty_span = match actual.kind() {
ty::Param(param_type) => {
let generics = self.tcx.generics_of(self.body_id.owner.to_def_id());
let type_param = generics.type_param(param_type, self.tcx);
Some(self.tcx.def_span(type_param.def_id))
}
ty::Param(param_type) => Some(
param_type.span_from_generics(self.tcx, self.body_id.owner.to_def_id()),
),
ty::Adt(def, _) if def.did().is_local() => Some(tcx.def_span(def.did())),
_ => None,
};
if let Some(span) = ty_span {
err.span_label(
span,
@ -386,7 +383,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut custom_span_label = false;
if !static_sources.is_empty() {
if !static_candidates.is_empty() {
err.note(
"found the following associated functions; to be used as methods, \
functions must have a `self` parameter",
@ -394,9 +391,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.span_label(span, "this is an associated function, not a method");
custom_span_label = true;
}
if static_sources.len() == 1 {
if static_candidates.len() == 1 {
let ty_str =
if let Some(CandidateSource::Impl(impl_did)) = static_sources.get(0) {
if let Some(CandidateSource::Impl(impl_did)) = static_candidates.get(0) {
// When the "method" is resolved through dereferencing, we really want the
// original type that has the associated function for accurate suggestions.
// (#61411)
@ -422,9 +419,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.help(&format!("try with `{}::{}`", ty_str, item_name,));
}
report_candidates(span, &mut err, &mut static_sources, sugg_span);
} else if static_sources.len() > 1 {
report_candidates(span, &mut err, &mut static_sources, sugg_span);
report_candidates(span, &mut err, &mut static_candidates, sugg_span);
} else if static_candidates.len() > 1 {
report_candidates(span, &mut err, &mut static_candidates, sugg_span);
}
let mut bound_spans = vec![];
@ -496,24 +493,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let (ty::Param(_), ty::PredicateKind::Trait(p)) =
(self_ty.kind(), parent_pred.kind().skip_binder())
{
let hir = self.tcx.hir();
let node = match p.trait_ref.self_ty().kind() {
ty::Param(_) => {
// Account for `fn` items like in `issue-35677.rs` to
// suggest restricting its type params.
let did = self.tcx.hir().body_owner_def_id(hir::BodyId {
hir_id: self.body_id,
});
Some(
self.tcx
.hir()
.get(self.tcx.hir().local_def_id_to_hir_id(did)),
)
let parent_body =
hir.body_owner(hir::BodyId { hir_id: self.body_id });
Some(hir.get(parent_body))
}
ty::Adt(def, _) => {
def.did().as_local().map(|def_id| hir.get_by_def_id(def_id))
}
ty::Adt(def, _) => def.did().as_local().map(|def_id| {
self.tcx
.hir()
.get(self.tcx.hir().local_def_id_to_hir_id(def_id))
}),
_ => None,
};
if let Some(hir::Node::Item(hir::Item { kind, .. })) = node {
@ -605,7 +596,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.iter()
.filter_map(|(p, parent, c)| c.as_ref().map(|c| (p, parent, c)))
.filter_map(|(p, parent, c)| match c.code() {
ObligationCauseCode::ImplDerivedObligation(ref data) => {
ObligationCauseCode::ImplDerivedObligation(data) => {
Some((&data.derived, p, parent, data.impl_def_id, data))
}
_ => None,
@ -620,22 +611,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match self.tcx.hir().get_if_local(impl_def_id) {
// Unmet obligation comes from a `derive` macro, point at it once to
// avoid multiple span labels pointing at the same place.
Some(Node::Item(hir::Item {
kind: hir::ItemKind::Trait(..),
ident,
..
})) if matches!(
ident.span.ctxt().outer_expn_data().kind,
ExpnKind::Macro(MacroKind::Derive, _)
) =>
{
let span = ident.span.ctxt().outer_expn_data().call_site;
let mut spans: MultiSpan = span.into();
spans.push_span_label(span, derive_msg);
let entry = spanned_predicates.entry(spans);
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
Some(Node::Item(hir::Item {
kind: hir::ItemKind::Impl(hir::Impl { of_trait, self_ty, .. }),
..
@ -659,34 +634,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
// Unmet obligation coming from a `trait`.
Some(Node::Item(hir::Item {
kind: hir::ItemKind::Trait(..),
ident,
span: item_span,
..
})) if !matches!(
ident.span.ctxt().outer_expn_data().kind,
ExpnKind::Macro(MacroKind::Derive, _)
) =>
{
if let Some(pred) = parent_p {
// Done to add the "doesn't satisfy" `span_label`.
let _ = format_pred(*pred);
}
skip_list.insert(p);
let mut spans = if cause.span != *item_span {
let mut spans: MultiSpan = cause.span.into();
spans.push_span_label(cause.span, unsatisfied_msg);
spans
} else {
ident.span.into()
};
spans.push_span_label(ident.span, "in this trait");
let entry = spanned_predicates.entry(spans);
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
// Unmet obligation coming from an `impl`.
Some(Node::Item(hir::Item {
kind:
@ -695,19 +642,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}),
span: item_span,
..
})) if !matches!(
self_ty.span.ctxt().outer_expn_data().kind,
ExpnKind::Macro(MacroKind::Derive, _)
) && !matches!(
of_trait.as_ref().map(|t| t
.path
.span
.ctxt()
.outer_expn_data()
.kind),
Some(ExpnKind::Macro(MacroKind::Derive, _))
) =>
{
})) => {
let sized_pred =
unsatisfied_predicates.iter().any(|(pred, _, _)| {
match pred.kind().skip_binder() {
@ -759,7 +694,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let entry = spanned_predicates.entry(spans);
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
_ => {}
Some(_) => unreachable!(),
None => (),
}
}
let mut spanned_predicates: Vec<_> = spanned_predicates.into_iter().collect();
@ -863,7 +799,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.on_unimplemented_note(trait_ref, &obligation);
(message, label)
})
.unwrap_or((None, None))
.unwrap()
} else {
(None, None)
};
@ -972,7 +908,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// If the method name is the name of a field with a function or closure type,
// give a helping note that it has to be called as `(x.f)(...)`.
if let SelfSource::MethodCall(expr) = source {
if !self.suggest_field_call(span, rcvr_ty, expr, item_name, &mut err)
if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_name, &mut err)
&& lev_candidate.is_none()
&& !custom_span_label
{
@ -982,10 +918,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
label_span_not_found(&mut err);
}
// Don't suggest (for example) `expr.field.method()` if `expr.method()`
// doesn't exist due to unsatisfied predicates.
// Don't suggest (for example) `expr.field.clone()` if `expr.clone()`
// can't be called due to `typeof(expr): Clone` not holding.
if unsatisfied_predicates.is_empty() {
self.check_for_field_method(&mut err, source, span, actual, item_name);
self.suggest_calling_method_on_field(&mut err, source, span, actual, item_name);
}
self.check_for_inner_self(&mut err, source, span, actual, item_name);
@ -1007,7 +943,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
source,
out_of_scope_traits,
&unsatisfied_predicates,
&static_sources,
&static_candidates,
unsatisfied_bounds,
);
}
@ -1146,7 +1082,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
None
}
fn suggest_field_call(
/// Suggest calling a field with a type that implements the `Fn*` traits instead of a method with
/// the same name as the field i.e. `(a.my_fn_ptr)(10)` instead of `a.my_fn_ptr(10)`.
fn suggest_calling_field_as_fn(
&self,
span: Span,
rcvr_ty: Ty<'tcx>,
@ -1408,7 +1346,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
false
}
fn check_for_field_method(
/// Suggest calling a method on a field i.e. `a.field.bar()` instead of `a.bar()`
fn suggest_calling_method_on_field(
&self,
err: &mut Diagnostic,
source: SelfSource<'tcx>,
@ -2021,7 +1960,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
let mut alt_rcvr_sugg = false;
if let (SelfSource::MethodCall(rcvr), false) = (source, unsatisfied_bounds) {
debug!(?span, ?item_name, ?rcvr_ty, ?rcvr);
debug!(
"suggest_traits_to_import: span={:?}, item_name={:?}, rcvr_ty={:?}, rcvr={:?}",
span, item_name, rcvr_ty, rcvr
);
let skippable = [
self.tcx.lang_items().clone_trait(),
self.tcx.lang_items().deref_trait(),
@ -2060,7 +2002,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// suggestions are generally misleading (see #94218).
break;
}
_ => {}
Err(_) => (),
}
for (rcvr_ty, pre) in &[


@ -203,13 +203,20 @@ pub struct UnifyReceiverContext<'tcx> {
pub substs: SubstsRef<'tcx>,
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Lift, Default)]
#[derive(Clone, PartialEq, Eq, Hash, Lift, Default)]
pub struct InternedObligationCauseCode<'tcx> {
/// `None` for `ObligationCauseCode::MiscObligation` (a common case, occurs ~60% of
/// the time). `Some` otherwise.
code: Option<Lrc<ObligationCauseCode<'tcx>>>,
}
impl<'tcx> std::fmt::Debug for InternedObligationCauseCode<'tcx> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let cause: &ObligationCauseCode<'_> = self;
cause.fmt(f)
}
}
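The manual `Debug` impl above leans on deref coercion so `{:?}` prints the inner `ObligationCauseCode` rather than the interning wrapper. A minimal standalone sketch of the same pattern, simplified with `Rc` in place of `Lrc` and without the `Option` fallback:

```rust
// Minimal sketch, not rustc code: a wrapper whose Debug output delegates to the
// wrapped value through Deref, so the interning layer never shows up in `{:?}`.
use std::fmt;
use std::ops::Deref;
use std::rc::Rc;

struct Interned<T>(Rc<T>);

impl<T> Deref for Interned<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T: fmt::Debug> fmt::Debug for Interned<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Deref coercion turns `&Interned<T>` into `&T`; then format the inner value.
        let inner: &T = self;
        inner.fmt(f)
    }
}

fn main() {
    let code = Interned(Rc::new(42));
    assert_eq!(format!("{code:?}"), "42");
}
```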
impl<'tcx> ObligationCauseCode<'tcx> {
#[inline(always)]
fn into(self) -> InternedObligationCauseCode<'tcx> {


@ -20,6 +20,7 @@ use rustc_hir::def_id::DefId;
use rustc_index::vec::Idx;
use rustc_macros::HashStable;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi;
use std::borrow::Cow;
@ -1282,6 +1283,12 @@ impl<'tcx> ParamTy {
pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
tcx.mk_ty_param(self.index, self.name)
}
pub fn span_from_generics(&self, tcx: TyCtxt<'tcx>, item_with_generics: DefId) -> Span {
let generics = tcx.generics_of(item_with_generics);
let type_param = generics.type_param(self, tcx);
tcx.def_span(type_param.def_id)
}
}
#[derive(Copy, Clone, Hash, TyEncodable, TyDecodable, Eq, PartialEq, Ord, PartialOrd)]


@ -4,6 +4,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::hir_id::HirId;
use rustc_hir::intravisit;
use rustc_hir::{BlockCheckMode, ExprKind, Node};
use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::query::Providers;
@ -517,24 +518,48 @@ pub fn check_unsafety(tcx: TyCtxt<'_>, def_id: LocalDefId) {
for &UnsafetyViolation { source_info, lint_root, kind, details } in violations.iter() {
let (description, note) = details.description_and_note();
// Report an error.
let unsafe_fn_msg =
if unsafe_op_in_unsafe_fn_allowed(tcx, lint_root) { " function or" } else { "" };
match kind {
UnsafetyViolationKind::General => {
// once
struct_span_err!(
let unsafe_fn_msg = if unsafe_op_in_unsafe_fn_allowed(tcx, lint_root) {
" function or"
} else {
""
};
let mut err = struct_span_err!(
tcx.sess,
source_info.span,
E0133,
"{} is unsafe and requires unsafe{} block",
description,
unsafe_fn_msg,
)
.span_label(source_info.span, description)
.note(note)
.emit();
);
err.span_label(source_info.span, description).note(note);
let note_non_inherited = tcx.hir().parent_iter(lint_root).find(|(id, node)| {
if let Node::Expr(block) = node
&& let ExprKind::Block(block, _) = block.kind
&& let BlockCheckMode::UnsafeBlock(_) = block.rules
{
true
}
else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id)
&& sig.header.is_unsafe()
{
true
} else {
false
}
});
if let Some((id, _)) = note_non_inherited {
let span = tcx.hir().span(id);
err.span_label(
tcx.sess.source_map().guess_head_span(span),
"items do not inherit unsafety from separate enclosing items",
);
}
err.emit();
}
UnsafetyViolationKind::UnsafeFn => tcx.struct_span_lint_hir(
UNSAFE_OP_IN_UNSAFE_FN,


@ -4,7 +4,7 @@
use itertools::Itertools;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::{pluralize, Applicability, MultiSpan};
use rustc_errors::MultiSpan;
use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
@ -18,7 +18,10 @@ use rustc_session::lint;
use rustc_span::symbol::{sym, Symbol};
use std::mem;
use crate::errors::UselessAssignment;
use crate::errors::{
ChangeFieldsToBeOfUnitType, IgnoredDerivedImpls, MultipleDeadCodes, ParentInfo,
UselessAssignment,
};
// Any local node that may call something in its body block should be
// explored. For example, if it's a live Node::Item that is a
@ -693,99 +696,89 @@ impl<'tcx> DeadVisitor<'tcx> {
parent_item: Option<LocalDefId>,
is_positional: bool,
) {
if let Some(&first_id) = dead_codes.first() {
let tcx = self.tcx;
let names: Vec<_> = dead_codes
.iter()
.map(|&def_id| tcx.item_name(def_id.to_def_id()).to_string())
.collect();
let spans: Vec<_> = dead_codes
.iter()
.map(|&def_id| match tcx.def_ident_span(def_id) {
Some(s) => s.with_ctxt(tcx.def_span(def_id).ctxt()),
None => tcx.def_span(def_id),
let Some(&first_id) = dead_codes.first() else {
return;
};
let tcx = self.tcx;
let names: Vec<_> =
dead_codes.iter().map(|&def_id| tcx.item_name(def_id.to_def_id())).collect();
let spans: Vec<_> = dead_codes
.iter()
.map(|&def_id| match tcx.def_ident_span(def_id) {
Some(s) => s.with_ctxt(tcx.def_span(def_id).ctxt()),
None => tcx.def_span(def_id),
})
.collect();
let descr = tcx.def_kind(first_id).descr(first_id.to_def_id());
let num = dead_codes.len();
let multiple = num > 6;
let name_list = names.into();
let lint = if is_positional {
lint::builtin::UNUSED_TUPLE_STRUCT_FIELDS
} else {
lint::builtin::DEAD_CODE
};
let parent_info = if let Some(parent_item) = parent_item {
let parent_descr = tcx.def_kind(parent_item).descr(parent_item.to_def_id());
Some(ParentInfo {
num,
descr,
parent_descr,
span: tcx.def_ident_span(parent_item).unwrap(),
})
} else {
None
};
let encl_def_id = parent_item.unwrap_or(first_id);
let ignored_derived_impls =
if let Some(ign_traits) = self.ignored_derived_traits.get(&encl_def_id) {
let trait_list = ign_traits
.iter()
.map(|(trait_id, _)| self.tcx.item_name(*trait_id))
.collect::<Vec<_>>();
let trait_list_len = trait_list.len();
Some(IgnoredDerivedImpls {
name: self.tcx.item_name(encl_def_id.to_def_id()),
trait_list: trait_list.into(),
trait_list_len,
})
.collect();
let descr = tcx.def_kind(first_id).descr(first_id.to_def_id());
let span_len = dead_codes.len();
let names = match &names[..] {
_ if span_len > 6 => String::new(),
[name] => format!("`{name}` "),
[names @ .., last] => {
format!(
"{} and `{last}` ",
names.iter().map(|name| format!("`{name}`")).join(", ")
)
}
[] => unreachable!(),
} else {
None
};
let msg = format!(
"{these}{descr}{s} {names}{are} never {participle}",
these = if span_len > 6 { "multiple " } else { "" },
s = pluralize!(span_len),
are = pluralize!("is", span_len),
);
tcx.struct_span_lint_hir(
if is_positional {
lint::builtin::UNUSED_TUPLE_STRUCT_FIELDS
} else {
lint::builtin::DEAD_CODE
},
tcx.hir().local_def_id_to_hir_id(first_id),
MultiSpan::from_spans(spans.clone()),
msg,
|err| {
if is_positional {
err.multipart_suggestion(
&format!(
"consider changing the field{s} to be of unit type to \
suppress this warning while preserving the field \
numbering, or remove the field{s}",
s = pluralize!(span_len)
),
spans.iter().map(|sp| (*sp, "()".to_string())).collect(),
// "HasPlaceholders" because applying this fix by itself isn't
// enough: All constructor calls have to be adjusted as well
Applicability::HasPlaceholders,
);
}
let diag = if is_positional {
MultipleDeadCodes::UnusedTupleStructFields {
multiple,
num,
descr,
participle,
name_list,
change_fields_suggestion: ChangeFieldsToBeOfUnitType { num, spans: spans.clone() },
parent_info,
ignored_derived_impls,
}
} else {
MultipleDeadCodes::DeadCodes {
multiple,
num,
descr,
participle,
name_list,
parent_info,
ignored_derived_impls,
}
};
if let Some(parent_item) = parent_item {
let parent_descr = tcx.def_kind(parent_item).descr(parent_item.to_def_id());
err.span_label(
tcx.def_ident_span(parent_item).unwrap(),
format!("{descr}{s} in this {parent_descr}", s = pluralize!(span_len)),
);
}
let encl_def_id = parent_item.unwrap_or(first_id);
if let Some(ign_traits) = self.ignored_derived_traits.get(&encl_def_id) {
let traits_str = ign_traits
.iter()
.map(|(trait_id, _)| format!("`{}`", self.tcx.item_name(*trait_id)))
.collect::<Vec<_>>()
.join(" and ");
let plural_s = pluralize!(ign_traits.len());
let article = if ign_traits.len() > 1 { "" } else { "a " };
let is_are = if ign_traits.len() > 1 { "these are" } else { "this is" };
let msg = format!(
"`{}` has {}derived impl{} for the trait{} {}, but {} \
intentionally ignored during dead code analysis",
self.tcx.item_name(encl_def_id.to_def_id()),
article,
plural_s,
plural_s,
traits_str,
is_are
);
err.note(&msg);
}
err
},
);
}
self.tcx.emit_spanned_lint(
lint,
tcx.hir().local_def_id_to_hir_id(first_id),
MultiSpan::from_spans(spans.clone()),
diag,
);
}
fn warn_dead_fields_and_variants(


@ -4,7 +4,9 @@ use std::{
};
use rustc_ast::Label;
use rustc_errors::{error_code, Applicability, ErrorGuaranteed, IntoDiagnostic, MultiSpan};
use rustc_errors::{
error_code, Applicability, DiagnosticSymbolList, ErrorGuaranteed, IntoDiagnostic, MultiSpan,
};
use rustc_hir::{self as hir, ExprKind, Target};
use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
use rustc_middle::ty::{MainDefinition, Ty};
@ -1446,3 +1448,59 @@ pub struct MissingConstErr {
#[label]
pub const_span: Span,
}
#[derive(LintDiagnostic)]
pub enum MultipleDeadCodes<'tcx> {
#[diag(passes_dead_codes)]
DeadCodes {
multiple: bool,
num: usize,
descr: &'tcx str,
participle: &'tcx str,
name_list: DiagnosticSymbolList,
#[subdiagnostic]
parent_info: Option<ParentInfo<'tcx>>,
#[subdiagnostic]
ignored_derived_impls: Option<IgnoredDerivedImpls>,
},
#[diag(passes_dead_codes)]
UnusedTupleStructFields {
multiple: bool,
num: usize,
descr: &'tcx str,
participle: &'tcx str,
name_list: DiagnosticSymbolList,
#[subdiagnostic]
change_fields_suggestion: ChangeFieldsToBeOfUnitType,
#[subdiagnostic]
parent_info: Option<ParentInfo<'tcx>>,
#[subdiagnostic]
ignored_derived_impls: Option<IgnoredDerivedImpls>,
},
}
#[derive(Subdiagnostic)]
#[label(passes_parent_info)]
pub struct ParentInfo<'tcx> {
pub num: usize,
pub descr: &'tcx str,
pub parent_descr: &'tcx str,
#[primary_span]
pub span: Span,
}
#[derive(Subdiagnostic)]
#[note(passes_ignored_derived_impls)]
pub struct IgnoredDerivedImpls {
pub name: Symbol,
pub trait_list: DiagnosticSymbolList,
pub trait_list_len: usize,
}
#[derive(Subdiagnostic)]
#[multipart_suggestion(passes_change_fields_to_be_of_unit_type, applicability = "has-placeholders")]
pub struct ChangeFieldsToBeOfUnitType {
pub num: usize,
#[suggestion_part(code = "()")]
pub spans: Vec<Span>,
}


@ -27,6 +27,7 @@ pub struct OnUnimplementedDirective {
}
#[derive(Default)]
/// For the `#[rustc_on_unimplemented]` attribute
pub struct OnUnimplementedNote {
pub message: Option<String>,
pub label: Option<String>,


@ -13,7 +13,7 @@ doc = false
[dependencies]
alloc = { path = "../alloc" }
cfg-if = { version = "0.1.8", features = ['rustc-dep-of-std'] }
cfg-if = { version = "1.0", features = ['rustc-dep-of-std'] }
core = { path = "../core" }
libc = { version = "0.2", default-features = false }
compiler_builtins = "0.1.0"


@ -17,4 +17,4 @@ core = { path = "../core" }
libc = { version = "0.2", default-features = false }
unwind = { path = "../unwind" }
compiler_builtins = "0.1.0"
cfg-if = "0.1.8"
cfg-if = "1.0"


@ -40,7 +40,7 @@ pub mod test {
cli::{parse_opts, TestOpts},
filter_tests,
helpers::metrics::{Metric, MetricMap},
options::{Concurrent, Options, RunIgnored, RunStrategy, ShouldPanic},
options::{Options, RunIgnored, RunStrategy, ShouldPanic},
run_test, test_main, test_main_static,
test_result::{TestResult, TrFailed, TrFailedMsg, TrIgnored, TrOk},
time::{TestExecTime, TestTimeOptions},
@ -85,7 +85,7 @@ use event::{CompletedTest, TestEvent};
use helpers::concurrency::get_concurrency;
use helpers::exit_code::get_exit_code;
use helpers::shuffle::{get_shuffle_seed, shuffle_tests};
use options::{Concurrent, RunStrategy};
use options::RunStrategy;
use test_result::*;
use time::TestExecTime;
@ -267,6 +267,19 @@ where
join_handle: Option<thread::JoinHandle<()>>,
}
impl RunningTest {
fn join(self, completed_test: &mut CompletedTest) {
if let Some(join_handle) = self.join_handle {
if let Err(_) = join_handle.join() {
if let TrOk = completed_test.result {
completed_test.result =
TrFailedMsg("panicked after reporting success".to_string());
}
}
}
}
}
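For context on the libtest change: each test now runs on its own named thread whenever the host supports threads, so panic output names the test instead of `main`, and `RunningTest::join` above turns a panic that happens after the result was reported into a failure. A plain-std sketch of the thread-naming part (not libtest itself):

```rust
// Plain std sketch: run a closure on a thread named after the test, so panic
// output reads e.g. "thread 'b' panicked at ..." instead of "thread 'main' ...".
use std::thread;

fn run_named_test(name: &str, test: impl FnOnce() + Send + 'static) -> thread::Result<()> {
    thread::Builder::new()
        .name(name.to_owned())
        .spawn(test)
        .expect("failed to spawn test thread")
        .join()
}

fn main() {
    assert!(run_named_test("b", || assert_eq!(1 + 1, 2)).is_ok());
}
```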
// Use a deterministic hasher
type TestMap =
HashMap<TestId, RunningTest, BuildHasherDefault<collections::hash_map::DefaultHasher>>;
@ -366,10 +379,10 @@ where
let (id, test) = remaining.pop_front().unwrap();
let event = TestEvent::TeWait(test.desc.clone());
notify_about_test_event(event)?;
let join_handle =
run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone(), Concurrent::No);
assert!(join_handle.is_none());
let completed_test = rx.recv().unwrap();
let join_handle = run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone());
// Wait for the test to complete.
let mut completed_test = rx.recv().unwrap();
RunningTest { join_handle }.join(&mut completed_test);
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
@ -383,15 +396,8 @@ where
let event = TestEvent::TeWait(desc.clone());
notify_about_test_event(event)?; //here no pad
let join_handle = run_test(
opts,
!opts.run_tests,
id,
test,
run_strategy,
tx.clone(),
Concurrent::Yes,
);
let join_handle =
run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone());
running_tests.insert(id, RunningTest { join_handle });
timeout_queue.push_back(TimeoutEntry { id, desc, timeout });
pending += 1;
@ -423,14 +429,7 @@ where
let mut completed_test = res.unwrap();
let running_test = running_tests.remove(&completed_test.id).unwrap();
if let Some(join_handle) = running_test.join_handle {
if let Err(_) = join_handle.join() {
if let TrOk = completed_test.result {
completed_test.result =
TrFailedMsg("panicked after reporting success".to_string());
}
}
}
running_test.join(&mut completed_test);
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
@ -443,8 +442,10 @@ where
for (id, b) in filtered.benchs {
let event = TestEvent::TeWait(b.desc.clone());
notify_about_test_event(event)?;
run_test(opts, false, id, b, run_strategy, tx.clone(), Concurrent::No);
let completed_test = rx.recv().unwrap();
let join_handle = run_test(opts, false, id, b, run_strategy, tx.clone());
// Wait for the test to complete.
let mut completed_test = rx.recv().unwrap();
RunningTest { join_handle }.join(&mut completed_test);
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
@ -520,7 +521,6 @@ pub fn run_test(
test: TestDescAndFn,
strategy: RunStrategy,
monitor_ch: Sender<CompletedTest>,
concurrency: Concurrent,
) -> Option<thread::JoinHandle<()>> {
let TestDescAndFn { desc, testfn } = test;
@ -538,7 +538,6 @@ pub fn run_test(
struct TestRunOpts {
pub strategy: RunStrategy,
pub nocapture: bool,
pub concurrency: Concurrent,
pub time: Option<time::TestTimeOptions>,
}
@ -549,7 +548,6 @@ pub fn run_test(
testfn: Box<dyn FnOnce() -> Result<(), String> + Send>,
opts: TestRunOpts,
) -> Option<thread::JoinHandle<()>> {
let concurrency = opts.concurrency;
let name = desc.name.clone();
let runtest = move || match opts.strategy {
@ -576,7 +574,7 @@ pub fn run_test(
// the test synchronously, regardless of the concurrency
// level.
let supports_threads = !cfg!(target_os = "emscripten") && !cfg!(target_family = "wasm");
if concurrency == Concurrent::Yes && supports_threads {
if supports_threads {
let cfg = thread::Builder::new().name(name.as_slice().to_owned());
let mut runtest = Arc::new(Mutex::new(Some(runtest)));
let runtest2 = runtest.clone();
@ -597,7 +595,7 @@ pub fn run_test(
}
let test_run_opts =
TestRunOpts { strategy, nocapture: opts.nocapture, concurrency, time: opts.time_options };
TestRunOpts { strategy, nocapture: opts.nocapture, time: opts.time_options };
match testfn {
DynBenchFn(benchfn) => {


@ -1,12 +1,5 @@
//! Enums denoting options for test execution.
/// Whether to execute tests concurrently or not
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Concurrent {
Yes,
No,
}
/// Number of times to run a benchmarked function
#[derive(Clone, PartialEq, Eq)]
pub enum BenchMode {


@ -102,7 +102,7 @@ pub fn do_not_run_ignored_tests() {
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
assert_ne!(result, TrOk);
}
@ -125,7 +125,7 @@ pub fn ignored_tests_result_in_ignored() {
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrIgnored);
}
@ -150,7 +150,7 @@ fn test_should_panic() {
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrOk);
}
@ -175,7 +175,7 @@ fn test_should_panic_good_message() {
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrOk);
}
@ -205,7 +205,7 @@ fn test_should_panic_bad_message() {
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrFailedMsg(failed_msg.to_string()));
}
@ -239,7 +239,7 @@ fn test_should_panic_non_string_message_type() {
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
assert_eq!(result, TrFailedMsg(failed_msg));
}
@ -267,15 +267,7 @@ fn test_should_panic_but_succeeds() {
testfn: DynTestFn(Box::new(f)),
};
let (tx, rx) = channel();
run_test(
&TestOpts::new(),
false,
TestId(0),
desc,
RunStrategy::InProcess,
tx,
Concurrent::No,
);
run_test(&TestOpts::new(), false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
assert_eq!(
result,
@ -306,7 +298,7 @@ fn report_time_test_template(report_time: bool) -> Option<TestExecTime> {
let test_opts = TestOpts { time_options, ..TestOpts::new() };
let (tx, rx) = channel();
run_test(&test_opts, false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&test_opts, false, TestId(0), desc, RunStrategy::InProcess, tx);
let exec_time = rx.recv().unwrap().exec_time;
exec_time
}
@ -345,7 +337,7 @@ fn time_test_failure_template(test_type: TestType) -> TestResult {
let test_opts = TestOpts { time_options: Some(time_options), ..TestOpts::new() };
let (tx, rx) = channel();
run_test(&test_opts, false, TestId(0), desc, RunStrategy::InProcess, tx, Concurrent::No);
run_test(&test_opts, false, TestId(0), desc, RunStrategy::InProcess, tx);
let result = rx.recv().unwrap().result;
result


@ -17,7 +17,7 @@ doc = false
core = { path = "../core" }
libc = { version = "0.2.79", features = ['rustc-dep-of-std'], default-features = false }
compiler_builtins = "0.1.0"
cfg-if = "0.1.8"
cfg-if = "1.0"
[build-dependencies]
cc = "1.0.69"


@ -2,7 +2,7 @@
{ "type": "test", "event": "started", "name": "a" }
{ "type": "test", "name": "a", "event": "ok" }
{ "type": "test", "event": "started", "name": "b" }
{ "type": "test", "name": "b", "event": "failed", "stdout": "thread 'main' panicked at 'assertion failed: false', f.rs:9:5\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\n" }
{ "type": "test", "name": "b", "event": "failed", "stdout": "thread 'b' panicked at 'assertion failed: false', f.rs:9:5\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\n" }
{ "type": "test", "event": "started", "name": "c" }
{ "type": "test", "name": "c", "event": "ok" }
{ "type": "test", "event": "started", "name": "d" }


@ -2,9 +2,9 @@
{ "type": "test", "event": "started", "name": "a" }
{ "type": "test", "name": "a", "event": "ok", "stdout": "print from successful test\n" }
{ "type": "test", "event": "started", "name": "b" }
{ "type": "test", "name": "b", "event": "failed", "stdout": "thread 'main' panicked at 'assertion failed: false', f.rs:9:5\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\n" }
{ "type": "test", "name": "b", "event": "failed", "stdout": "thread 'b' panicked at 'assertion failed: false', f.rs:9:5\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\n" }
{ "type": "test", "event": "started", "name": "c" }
{ "type": "test", "name": "c", "event": "ok", "stdout": "thread 'main' panicked at 'assertion failed: false', f.rs:15:5\n" }
{ "type": "test", "name": "c", "event": "ok", "stdout": "thread 'c' panicked at 'assertion failed: false', f.rs:15:5\n" }
{ "type": "test", "event": "started", "name": "d" }
{ "type": "test", "name": "d", "event": "ignored", "message": "msg" }
{ "type": "suite", "event": "failed", "passed": 2, "failed": 1, "ignored": 1, "measured": 0, "filtered_out": 0, "exec_time": $TIME }


@ -0,0 +1,16 @@
// only-aarch64
// run-pass
// needs-asm-support
// Test that we properly work around this LLVM issue:
// https://github.com/llvm/llvm-project/issues/58384
use std::arch::asm;
fn main() {
let a: i32;
unsafe {
asm!("", inout("x0") 435 => a);
}
assert_eq!(a, 435);
}
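As a companion to the regression test above: the `reg_to_llvm` change earlier in this commit picks the LLVM constraint string from the operand size and special-cases `x30`. A minimal standalone sketch of that selection (hypothetical helper name, not rustc code):

```rust
// Minimal sketch mirroring the class selection in `reg_to_llvm` above.
// `reg_index` stands in for what `a64_reg_index` returns; sizes are in bytes.
fn aarch64_constraint(reg_index: u32, size_bytes: Option<u64>) -> String {
    // 64-bit operands use the `x` view of the register; everything else (including
    // discarded outputs, which are materialized as i32) uses the `w` view.
    let class = match size_bytes {
        Some(8) => 'x',
        _ => 'w',
    };
    if class == 'x' && reg_index == 30 {
        // LLVM does not accept `{x30}`; the link register must be spelled `{lr}`.
        "{lr}".to_string()
    } else {
        format!("{{{class}{reg_index}}}")
    }
}

fn main() {
    assert_eq!(aarch64_constraint(0, Some(8)), "{x0}");
    assert_eq!(aarch64_constraint(0, Some(4)), "{w0}");
    assert_eq!(aarch64_constraint(30, Some(8)), "{lr}");
}
```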


@ -3,7 +3,7 @@
#[derive(Debug)]
pub struct Whatever {
pub field0: (),
field1: (), //~ ERROR fields `field1`, `field2`, `field3` and `field4` are never read
field1: (), //~ ERROR fields `field1`, `field2`, `field3`, and `field4` are never read
field2: (),
field3: (),
field4: (),


@ -1,4 +1,4 @@
error: fields `field1`, `field2`, `field3` and `field4` are never read
error: fields `field1`, `field2`, `field3`, and `field4` are never read
--> $DIR/clone-debug-dead-code-in-the-same-struct.rs:6:5
|
LL | pub struct Whatever {


@ -7,7 +7,7 @@ struct Bar {
b: usize, //~ ERROR field `b` is never read
#[deny(dead_code)]
c: usize, //~ ERROR fields `c` and `e` are never read
d: usize, //~ WARN fields `d`, `f` and `g` are never read
d: usize, //~ WARN fields `d`, `f`, and `g` are never read
#[deny(dead_code)]
e: usize,
f: usize,


@ -1,4 +1,4 @@
warning: fields `d`, `f` and `g` are never read
warning: fields `d`, `f`, and `g` are never read
--> $DIR/multiple-dead-codes-in-the-same-struct.rs:10:5
|
LL | struct Bar {


@ -11,7 +11,7 @@ struct SingleUnused(i32, [u8; LEN], String);
//~| HELP: consider changing the field to be of unit type
struct MultipleUnused(i32, f32, String, u8);
//~^ ERROR: fields `0`, `1`, `2` and `3` are never read
//~^ ERROR: fields `0`, `1`, `2`, and `3` are never read
//~| NOTE: fields in this struct
//~| HELP: consider changing the fields to be of unit type


@ -16,7 +16,7 @@ help: consider changing the field to be of unit type to suppress this warning wh
LL | struct SingleUnused(i32, (), String);
| ~~
error: fields `0`, `1`, `2` and `3` are never read
error: fields `0`, `1`, `2`, and `3` are never read
--> $DIR/tuple-struct-field.rs:13:23
|
LL | struct MultipleUnused(i32, f32, String, u8);


@ -10,7 +10,7 @@ fee
fie
foe
fum
thread 'main' panicked at 'explicit panic', $DIR/test-thread-capture.rs:32:5
thread 'thready_fail' panicked at 'explicit panic', $DIR/test-thread-capture.rs:32:5
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace


@ -1,2 +1,2 @@
thread 'main' panicked at 'explicit panic', $DIR/test-thread-nocapture.rs:32:5
thread 'thready_fail' panicked at 'explicit panic', $DIR/test-thread-nocapture.rs:32:5
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace


@ -0,0 +1,26 @@
#![allow(unused, dead_code)]
static mut FOO: u64 = 0;
fn static_mod() {
unsafe {static BAR: u64 = FOO;}
//~^ ERROR: use of mutable static is unsafe
//~| NOTE: use of mutable static
//~| NOTE: mutable statics can be mutated by multiple threads
//~| NOTE: items do not inherit unsafety
}
unsafe fn unsafe_call() {}
fn foo() {
unsafe {
//~^ NOTE: items do not inherit unsafety
fn bar() {
unsafe_call();
//~^ ERROR: call to unsafe function
//~| NOTE: call to unsafe function
//~| NOTE: consult the function's documentation
}
}
}
fn main() {}


@ -0,0 +1,24 @@
error[E0133]: use of mutable static is unsafe and requires unsafe function or block
--> $DIR/unsafe-not-inherited.rs:6:31
|
LL | unsafe {static BAR: u64 = FOO;}
| ------ ^^^ use of mutable static
| |
| items do not inherit unsafety from separate enclosing items
|
= note: mutable statics can be mutated by multiple threads: aliasing violations or data races will cause undefined behavior
error[E0133]: call to unsafe function is unsafe and requires unsafe function or block
--> $DIR/unsafe-not-inherited.rs:18:13
|
LL | unsafe {
| ------ items do not inherit unsafety from separate enclosing items
...
LL | unsafe_call();
| ^^^^^^^^^^^^^ call to unsafe function
|
= note: consult the function's documentation for information on how to avoid undefined behavior
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0133`.


@ -10,6 +10,7 @@ regex = "1"
miropt-test-tools = { path = "../miropt-test-tools" }
lazy_static = "1"
walkdir = "2"
ignore = "0.4.18"
[[bin]]
name = "rust-tidy"


@ -2,6 +2,8 @@
//! - the number of entries in each directory must be less than `ENTRY_LIMIT`
//! - there are no stray `.stderr` files
use ignore::Walk;
use ignore::WalkBuilder;
use std::fs;
use std::path::Path;
@ -11,34 +13,39 @@ const ROOT_ENTRY_LIMIT: usize = 941;
const ISSUES_ENTRY_LIMIT: usize = 2117;
fn check_entries(path: &Path, bad: &mut bool) {
let dirs = walkdir::WalkDir::new(&path.join("test/ui"))
.into_iter()
.filter_entry(|e| e.file_type().is_dir());
for dir in dirs {
if let Ok(dir) = dir {
let dir_path = dir.path();
for dir in Walk::new(&path.join("test/ui")) {
if let Ok(entry) = dir {
if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
let dir_path = entry.path();
// Use special values for these dirs.
let is_root = path.join("test/ui") == dir_path;
let is_issues_dir = path.join("test/ui/issues") == dir_path;
let limit = if is_root {
ROOT_ENTRY_LIMIT
} else if is_issues_dir {
ISSUES_ENTRY_LIMIT
} else {
ENTRY_LIMIT
};
// Use special values for these dirs.
let is_root = path.join("test/ui") == dir_path;
let is_issues_dir = path.join("test/ui/issues") == dir_path;
let limit = if is_root {
ROOT_ENTRY_LIMIT
} else if is_issues_dir {
ISSUES_ENTRY_LIMIT
} else {
ENTRY_LIMIT
};
let count = WalkBuilder::new(&dir_path)
.max_depth(Some(1))
.build()
.into_iter()
.collect::<Vec<_>>()
.len()
- 1; // remove the dir itself
let count = std::fs::read_dir(dir_path).unwrap().count();
if count > limit {
tidy_error!(
bad,
"following path contains more than {} entries, \
you should move the test to some relevant subdirectory (current: {}): {}",
limit,
count,
dir_path.display()
);
if count > limit {
tidy_error!(
bad,
"following path contains more than {} entries, \
you should move the test to some relevant subdirectory (current: {}): {}",
limit,
count,
dir_path.display()
);
}
}
}
}
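The rewritten check above counts entries with the `ignore` crate's gitignore-aware walker, so paths ignored via .gitignore (typically untracked build artifacts) no longer count toward the entry limit. A minimal sketch of that counting step, assuming the `ignore = "0.4.18"` dependency added above:

```rust
// Minimal sketch of the entry-counting strategy above; the walker from the
// `ignore` crate skips files ignored via .gitignore, unlike std::fs::read_dir.
use ignore::WalkBuilder;
use std::path::Path;

fn tracked_entry_count(dir: &Path) -> usize {
    WalkBuilder::new(dir)
        .max_depth(Some(1)) // direct children only
        .build()
        .filter_map(Result::ok)
        .count()
        .saturating_sub(1) // the walker also yields `dir` itself
}

fn main() {
    println!("{}", tracked_entry_count(Path::new(".")));
}
```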