1
Fork 0

Auto merge of #91760 - matthiaskrgr:rollup-zcemh6j, r=matthiaskrgr

Rollup of 10 pull requests

Successful merges:

 - #90407 (Document all public items in `rustc_incremental`)
 - #90897 (Fix incorrect stability attributes)
 - #91105 (Fix method name reference in stream documentation)
 - #91325 (adjust const_eval_select documentation)
 - #91470 (code-cov: generate dead functions with private/default linkage)
 - #91482 (Update documentation to use `from()` to initialize `HashMap`s and `BTreeMap`s)
 - #91524 (Fix Vec::extend_from_slice docs)
 - #91575 (Fix ICE on format string of macro with secondary-label)
 - #91625 (Remove redundant [..]s)
 - #91646 (Fix documentation for `core::ready::Ready`)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2021-12-10 21:59:07 +00:00
commit f0448f44bc
67 changed files with 424 additions and 224 deletions

View file

@ -57,7 +57,7 @@ impl LitKind {
// string in the token. // string in the token.
let s = symbol.as_str(); let s = symbol.as_str();
let symbol = let symbol =
if s.contains(&['\\', '\r'][..]) { if s.contains(&['\\', '\r']) {
let mut buf = String::with_capacity(s.len()); let mut buf = String::with_capacity(s.len());
let mut error = Ok(()); let mut error = Ok(());
unescape_literal(&s, Mode::Str, &mut |_, unescaped_char| { unescape_literal(&s, Mode::Str, &mut |_, unescaped_char| {

View file

@ -347,7 +347,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
if let Some(modifiers) = nested_meta.value_str() { if let Some(modifiers) = nested_meta.value_str() {
for modifier in modifiers.as_str().split(',') { for modifier in modifiers.as_str().split(',') {
if let Some(modifier) = modifier.strip_prefix(&['+', '-'][..]) { if let Some(modifier) = modifier.strip_prefix(&['+', '-']) {
macro_rules! gate_modifier { ($($name:literal => $feature:ident)*) => { macro_rules! gate_modifier { ($($name:literal => $feature:ident)*) => {
$(if modifier == $name { $(if modifier == $name {
let msg = concat!("`#[link(modifiers=\"", $name, "\")]` is unstable"); let msg = concat!("`#[link(modifiers=\"", $name, "\")]` is unstable");
@ -383,7 +383,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
} }
ast::ItemKind::Fn(..) => { ast::ItemKind::Fn(..) => {
if self.sess.contains_name(&i.attrs[..], sym::start) { if self.sess.contains_name(&i.attrs, sym::start) {
gate_feature_post!( gate_feature_post!(
&self, &self,
start, start,
@ -396,7 +396,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
} }
ast::ItemKind::Struct(..) => { ast::ItemKind::Struct(..) => {
for attr in self.sess.filter_by_name(&i.attrs[..], sym::repr) { for attr in self.sess.filter_by_name(&i.attrs, sym::repr) {
for item in attr.meta_item_list().unwrap_or_else(Vec::new) { for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
if item.has_name(sym::simd) { if item.has_name(sym::simd) {
gate_feature_post!( gate_feature_post!(

View file

@ -499,7 +499,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
ast::MetaItemKind::List(ref items) => { ast::MetaItemKind::List(ref items) => {
self.print_path(&item.path, false, 0); self.print_path(&item.path, false, 0);
self.popen(); self.popen();
self.commasep(Consistent, &items[..], |s, i| s.print_meta_list_item(i)); self.commasep(Consistent, &items, |s, i| s.print_meta_list_item(i));
self.pclose(); self.pclose();
} }
} }
@ -997,7 +997,7 @@ impl<'a> State<'a> {
} }
ast::TyKind::Tup(ref elts) => { ast::TyKind::Tup(ref elts) => {
self.popen(); self.popen();
self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(ty)); self.commasep(Inconsistent, &elts, |s, ty| s.print_type(ty));
if elts.len() == 1 { if elts.len() == 1 {
self.word(","); self.word(",");
} }
@ -1017,10 +1017,10 @@ impl<'a> State<'a> {
ast::TyKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, false), ast::TyKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, false),
ast::TyKind::TraitObject(ref bounds, syntax) => { ast::TyKind::TraitObject(ref bounds, syntax) => {
let prefix = if syntax == ast::TraitObjectSyntax::Dyn { "dyn" } else { "" }; let prefix = if syntax == ast::TraitObjectSyntax::Dyn { "dyn" } else { "" };
self.print_type_bounds(prefix, &bounds[..]); self.print_type_bounds(prefix, &bounds);
} }
ast::TyKind::ImplTrait(_, ref bounds) => { ast::TyKind::ImplTrait(_, ref bounds) => {
self.print_type_bounds("impl", &bounds[..]); self.print_type_bounds("impl", &bounds);
} }
ast::TyKind::Array(ref ty, ref length) => { ast::TyKind::Array(ref ty, ref length) => {
self.word("["); self.word("[");
@ -1339,7 +1339,7 @@ impl<'a> State<'a> {
real_bounds.push(b.clone()); real_bounds.push(b.clone());
} }
} }
self.print_type_bounds(":", &real_bounds[..]); self.print_type_bounds(":", &real_bounds);
self.print_where_clause(&generics.where_clause); self.print_where_clause(&generics.where_clause);
self.word(" "); self.word(" ");
self.bopen(); self.bopen();
@ -1368,7 +1368,7 @@ impl<'a> State<'a> {
} }
} }
self.nbsp(); self.nbsp();
self.print_type_bounds("=", &real_bounds[..]); self.print_type_bounds("=", &real_bounds);
self.print_where_clause(&generics.where_clause); self.print_where_clause(&generics.where_clause);
self.word(";"); self.word(";");
} }
@ -1960,10 +1960,10 @@ impl<'a> State<'a> {
self.print_expr_tup(exprs); self.print_expr_tup(exprs);
} }
ast::ExprKind::Call(ref func, ref args) => { ast::ExprKind::Call(ref func, ref args) => {
self.print_expr_call(func, &args[..]); self.print_expr_call(func, &args);
} }
ast::ExprKind::MethodCall(ref segment, ref args, _) => { ast::ExprKind::MethodCall(ref segment, ref args, _) => {
self.print_expr_method_call(segment, &args[..]); self.print_expr_method_call(segment, &args);
} }
ast::ExprKind::Binary(op, ref lhs, ref rhs) => { ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
self.print_expr_binary(op, lhs, rhs); self.print_expr_binary(op, lhs, rhs);
@ -2440,11 +2440,11 @@ impl<'a> State<'a> {
self.print_path(path, true, 0); self.print_path(path, true, 0);
} }
self.popen(); self.popen();
self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p)); self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
self.pclose(); self.pclose();
} }
PatKind::Or(ref pats) => { PatKind::Or(ref pats) => {
self.strsep("|", true, Inconsistent, &pats[..], |s, p| s.print_pat(p)); self.strsep("|", true, Inconsistent, &pats, |s, p| s.print_pat(p));
} }
PatKind::Path(None, ref path) => { PatKind::Path(None, ref path) => {
self.print_path(path, true, 0); self.print_path(path, true, 0);
@ -2462,7 +2462,7 @@ impl<'a> State<'a> {
self.word_space("{"); self.word_space("{");
self.commasep_cmnt( self.commasep_cmnt(
Consistent, Consistent,
&fields[..], &fields,
|s, f| { |s, f| {
s.cbox(INDENT_UNIT); s.cbox(INDENT_UNIT);
if !f.is_shorthand { if !f.is_shorthand {
@ -2485,7 +2485,7 @@ impl<'a> State<'a> {
} }
PatKind::Tuple(ref elts) => { PatKind::Tuple(ref elts) => {
self.popen(); self.popen();
self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p)); self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
if elts.len() == 1 { if elts.len() == 1 {
self.word(","); self.word(",");
} }
@ -2527,7 +2527,7 @@ impl<'a> State<'a> {
} }
PatKind::Slice(ref elts) => { PatKind::Slice(ref elts) => {
self.word("["); self.word("[");
self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p)); self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
self.word("]"); self.word("]");
} }
PatKind::Rest => self.word(".."), PatKind::Rest => self.word(".."),
@ -2836,7 +2836,7 @@ impl<'a> State<'a> {
self.print_path(&tree.prefix, false, 0); self.print_path(&tree.prefix, false, 0);
self.word("::{"); self.word("::{");
} }
self.commasep(Inconsistent, &items[..], |this, &(ref tree, _)| { self.commasep(Inconsistent, &items, |this, &(ref tree, _)| {
this.print_use_tree(tree) this.print_use_tree(tree)
}); });
self.word("}"); self.word("}");

View file

@ -712,7 +712,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
Some(&idx) => Some(idx), Some(&idx) => Some(idx),
None => { None => {
let msg = format!("there is no argument named `{}`", name); let msg = format!("there is no argument named `{}`", name);
ecx.struct_span_err(span, &msg[..]).emit(); ecx.struct_span_err(span, &msg).emit();
None None
} }
}, },

View file

@ -766,8 +766,8 @@ impl<'a> TraitDef<'a> {
self, self,
struct_def, struct_def,
type_ident, type_ident,
&self_args[..], &self_args,
&nonself_args[..], &nonself_args,
) )
} else { } else {
method_def.expand_struct_method_body( method_def.expand_struct_method_body(
@ -775,8 +775,8 @@ impl<'a> TraitDef<'a> {
self, self,
struct_def, struct_def,
type_ident, type_ident,
&self_args[..], &self_args,
&nonself_args[..], &nonself_args,
use_temporaries, use_temporaries,
) )
}; };
@ -815,8 +815,8 @@ impl<'a> TraitDef<'a> {
self, self,
enum_def, enum_def,
type_ident, type_ident,
&self_args[..], &self_args,
&nonself_args[..], &nonself_args,
) )
} else { } else {
method_def.expand_enum_method_body( method_def.expand_enum_method_body(
@ -825,7 +825,7 @@ impl<'a> TraitDef<'a> {
enum_def, enum_def,
type_ident, type_ident,
self_args, self_args,
&nonself_args[..], &nonself_args,
) )
}; };
@ -1217,7 +1217,7 @@ impl<'a> MethodDef<'a> {
let vi_idents = self_arg_names let vi_idents = self_arg_names
.iter() .iter()
.map(|name| { .map(|name| {
let vi_suffix = format!("{}_vi", &name[..]); let vi_suffix = format!("{}_vi", name);
Ident::from_str_and_span(&vi_suffix, span) Ident::from_str_and_span(&vi_suffix, span)
}) })
.collect::<Vec<Ident>>(); .collect::<Vec<Ident>>();
@ -1226,7 +1226,7 @@ impl<'a> MethodDef<'a> {
// delegated expression that handles the catch-all case, // delegated expression that handles the catch-all case,
// using `__variants_tuple` to drive logic if necessary. // using `__variants_tuple` to drive logic if necessary.
let catch_all_substructure = let catch_all_substructure =
EnumNonMatchingCollapsed(self_arg_idents, &variants[..], &vi_idents[..]); EnumNonMatchingCollapsed(self_arg_idents, &variants, &vi_idents);
let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty()); let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty());
@ -1261,7 +1261,7 @@ impl<'a> MethodDef<'a> {
idents idents
}; };
for self_arg_name in &self_arg_names[1..] { for self_arg_name in &self_arg_names[1..] {
let (p, idents) = mk_self_pat(cx, &self_arg_name[..]); let (p, idents) = mk_self_pat(cx, &self_arg_name);
subpats.push(p); subpats.push(p);
self_pats_idents.push(idents); self_pats_idents.push(idents);
} }

View file

@ -549,7 +549,7 @@ impl<'a, 'b> Context<'a, 'b> {
} else { } else {
self.fmtsp self.fmtsp
}; };
let mut err = self.ecx.struct_span_err(sp, &msg[..]); let mut err = self.ecx.struct_span_err(sp, &msg);
err.note(&format!( err.note(&format!(
"did you intend to capture a variable `{}` from \ "did you intend to capture a variable `{}` from \
@ -995,8 +995,9 @@ pub fn expand_preparsed_format_args(
e.note(&note); e.note(&note);
} }
if let Some((label, span)) = err.secondary_label { if let Some((label, span)) = err.secondary_label {
let sp = fmt_span.from_inner(span); if efmt_kind_is_lit {
e.span_label(sp, label); e.span_label(fmt_span.from_inner(span), label);
}
} }
e.emit(); e.emit();
return DummyResult::raw_expr(sp, true); return DummyResult::raw_expr(sp, true);

View file

@ -32,7 +32,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, _diag_han
if config.emit_asm { if config.emit_asm {
let _timer = cgcx let _timer = cgcx
.prof .prof
.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &module.name[..]); .generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name);
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
context.compile_to_file(OutputKind::Assembler, path.to_str().expect("path to str")); context.compile_to_file(OutputKind::Assembler, path.to_str().expect("path to str"));
} }
@ -41,7 +41,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, _diag_han
EmitObj::ObjectCode(_) => { EmitObj::ObjectCode(_) => {
let _timer = cgcx let _timer = cgcx
.prof .prof
.generic_activity_with_arg("LLVM_module_codegen_emit_obj", &module.name[..]); .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name);
match &*module.name { match &*module.name {
"std_example.7rcbfp3g-cgu.15" => { "std_example.7rcbfp3g-cgu.15" => {
println!("Dumping reproducer {}", module.name); println!("Dumping reproducer {}", module.name);

View file

@ -477,7 +477,7 @@ pub(crate) fn inline_asm_call(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
debug!("Asm Output Type: {:?}", output); debug!("Asm Output Type: {:?}", output);
let fty = bx.cx.type_func(&argtys[..], output); let fty = bx.cx.type_func(&argtys, output);
unsafe { unsafe {
// Ask LLVM to verify that the constraints are well-formed. // Ask LLVM to verify that the constraints are well-formed.
let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons.as_ptr().cast(), cons.len()); let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons.as_ptr().cast(), cons.len());

View file

@ -587,7 +587,7 @@ pub(crate) fn run_pass_manager(
config: &ModuleConfig, config: &ModuleConfig,
thin: bool, thin: bool,
) -> Result<(), FatalError> { ) -> Result<(), FatalError> {
let _timer = cgcx.prof.extra_verbose_generic_activity("LLVM_lto_optimize", &module.name[..]); let _timer = cgcx.prof.extra_verbose_generic_activity("LLVM_lto_optimize", &*module.name);
// Now we have one massive module inside of llmod. Time to run the // Now we have one massive module inside of llmod. Time to run the
// LTO-specific optimization passes that LLVM provides. // LTO-specific optimization passes that LLVM provides.

View file

@ -510,7 +510,7 @@ pub(crate) unsafe fn optimize(
module: &ModuleCodegen<ModuleLlvm>, module: &ModuleCodegen<ModuleLlvm>,
config: &ModuleConfig, config: &ModuleConfig,
) -> Result<(), FatalError> { ) -> Result<(), FatalError> {
let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &module.name[..]); let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name);
let llmod = module.module_llvm.llmod(); let llmod = module.module_llvm.llmod();
let llcx = &*module.module_llvm.llcx; let llcx = &*module.module_llvm.llcx;
@ -663,14 +663,14 @@ pub(crate) unsafe fn optimize(
{ {
let _timer = cgcx.prof.extra_verbose_generic_activity( let _timer = cgcx.prof.extra_verbose_generic_activity(
"LLVM_module_optimize_function_passes", "LLVM_module_optimize_function_passes",
&module.name[..], &*module.name,
); );
llvm::LLVMRustRunFunctionPassManager(fpm, llmod); llvm::LLVMRustRunFunctionPassManager(fpm, llmod);
} }
{ {
let _timer = cgcx.prof.extra_verbose_generic_activity( let _timer = cgcx.prof.extra_verbose_generic_activity(
"LLVM_module_optimize_module_passes", "LLVM_module_optimize_module_passes",
&module.name[..], &*module.name,
); );
llvm::LLVMRunPassManager(mpm, llmod); llvm::LLVMRunPassManager(mpm, llmod);
} }
@ -733,7 +733,7 @@ pub(crate) unsafe fn codegen(
module: ModuleCodegen<ModuleLlvm>, module: ModuleCodegen<ModuleLlvm>,
config: &ModuleConfig, config: &ModuleConfig,
) -> Result<CompiledModule, FatalError> { ) -> Result<CompiledModule, FatalError> {
let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen", &module.name[..]); let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name);
{ {
let llmod = module.module_llvm.llmod(); let llmod = module.module_llvm.llmod();
let llcx = &*module.module_llvm.llcx; let llcx = &*module.module_llvm.llcx;
@ -782,7 +782,7 @@ pub(crate) unsafe fn codegen(
if config.bitcode_needed() { if config.bitcode_needed() {
let _timer = cgcx let _timer = cgcx
.prof .prof
.generic_activity_with_arg("LLVM_module_codegen_make_bitcode", &module.name[..]); .generic_activity_with_arg("LLVM_module_codegen_make_bitcode", &*module.name);
let thin = ThinBuffer::new(llmod); let thin = ThinBuffer::new(llmod);
let data = thin.data(); let data = thin.data();
@ -795,10 +795,9 @@ pub(crate) unsafe fn codegen(
} }
if config.emit_bc || config.emit_obj == EmitObj::Bitcode { if config.emit_bc || config.emit_obj == EmitObj::Bitcode {
let _timer = cgcx.prof.generic_activity_with_arg( let _timer = cgcx
"LLVM_module_codegen_emit_bitcode", .prof
&module.name[..], .generic_activity_with_arg("LLVM_module_codegen_emit_bitcode", &*module.name);
);
if let Err(e) = fs::write(&bc_out, data) { if let Err(e) = fs::write(&bc_out, data) {
let msg = format!("failed to write bytecode to {}: {}", bc_out.display(), e); let msg = format!("failed to write bytecode to {}: {}", bc_out.display(), e);
diag_handler.err(&msg); diag_handler.err(&msg);
@ -806,18 +805,16 @@ pub(crate) unsafe fn codegen(
} }
if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) { if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) {
let _timer = cgcx.prof.generic_activity_with_arg( let _timer = cgcx
"LLVM_module_codegen_embed_bitcode", .prof
&module.name[..], .generic_activity_with_arg("LLVM_module_codegen_embed_bitcode", &*module.name);
);
embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, data); embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, data);
} }
} }
if config.emit_ir { if config.emit_ir {
let _timer = cgcx let _timer =
.prof cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name);
.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &module.name[..]);
let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name); let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
let out_c = path_to_c_string(&out); let out_c = path_to_c_string(&out);
@ -866,9 +863,8 @@ pub(crate) unsafe fn codegen(
} }
if config.emit_asm { if config.emit_asm {
let _timer = cgcx let _timer =
.prof cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name);
.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &module.name[..]);
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
// We can't use the same module for asm and object code output, // We can't use the same module for asm and object code output,
@ -898,7 +894,7 @@ pub(crate) unsafe fn codegen(
EmitObj::ObjectCode(_) => { EmitObj::ObjectCode(_) => {
let _timer = cgcx let _timer = cgcx
.prof .prof
.generic_activity_with_arg("LLVM_module_codegen_emit_obj", &module.name[..]); .generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name);
let dwo_out = cgcx.output_filenames.temp_path_dwo(module_name); let dwo_out = cgcx.output_filenames.temp_path_dwo(module_name);
let dwo_out = match cgcx.split_debuginfo { let dwo_out = match cgcx.split_debuginfo {

View file

@ -120,7 +120,7 @@ impl CodegenCx<'ll, 'tcx> {
!null_terminated as Bool, !null_terminated as Bool,
); );
let sym = self.generate_local_symbol_name("str"); let sym = self.generate_local_symbol_name("str");
let g = self.define_global(&sym[..], self.val_ty(sc)).unwrap_or_else(|| { let g = self.define_global(&sym, self.val_ty(sc)).unwrap_or_else(|| {
bug!("symbol `{}` is already defined", sym); bug!("symbol `{}` is already defined", sym);
}); });
llvm::LLVMSetInitializer(g, sc); llvm::LLVMSetInitializer(g, sc);

View file

@ -225,7 +225,7 @@ impl CodegenCx<'ll, 'tcx> {
let gv = match kind { let gv = match kind {
Some(kind) if !self.tcx.sess.fewer_names() => { Some(kind) if !self.tcx.sess.fewer_names() => {
let name = self.generate_local_symbol_name(kind); let name = self.generate_local_symbol_name(kind);
let gv = self.define_global(&name[..], self.val_ty(cv)).unwrap_or_else(|| { let gv = self.define_global(&name, self.val_ty(cv)).unwrap_or_else(|| {
bug!("symbol `{}` is already defined", name); bug!("symbol `{}` is already defined", name);
}); });
llvm::LLVMRustSetLinkage(gv, llvm::Linkage::PrivateLinkage); llvm::LLVMRustSetLinkage(gv, llvm::Linkage::PrivateLinkage);

View file

@ -89,7 +89,7 @@ pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
}); });
let filenames_size = filenames_buffer.len(); let filenames_size = filenames_buffer.len();
let filenames_val = cx.const_bytes(&filenames_buffer[..]); let filenames_val = cx.const_bytes(&filenames_buffer);
let filenames_ref = coverageinfo::hash_bytes(filenames_buffer); let filenames_ref = coverageinfo::hash_bytes(filenames_buffer);
// Generate the LLVM IR representation of the coverage map and store it in a well-known global // Generate the LLVM IR representation of the coverage map and store it in a well-known global
@ -238,7 +238,7 @@ fn save_function_record(
) { ) {
// Concatenate the encoded coverage mappings // Concatenate the encoded coverage mappings
let coverage_mapping_size = coverage_mapping_buffer.len(); let coverage_mapping_size = coverage_mapping_buffer.len();
let coverage_mapping_val = cx.const_bytes(&coverage_mapping_buffer[..]); let coverage_mapping_val = cx.const_bytes(&coverage_mapping_buffer);
let func_name_hash = coverageinfo::hash_str(&mangled_function_name); let func_name_hash = coverageinfo::hash_str(&mangled_function_name);
let func_name_hash_val = cx.const_u64(func_name_hash); let func_name_hash_val = cx.const_u64(func_name_hash);

View file

@ -212,8 +212,8 @@ fn declare_unused_fn(cx: &CodegenCx<'ll, 'tcx>, def_id: &DefId) -> Instance<'tcx
), ),
); );
llvm::set_linkage(llfn, llvm::Linkage::WeakAnyLinkage); llvm::set_linkage(llfn, llvm::Linkage::PrivateLinkage);
llvm::set_visibility(llfn, llvm::Visibility::Hidden); llvm::set_visibility(llfn, llvm::Visibility::Default);
assert!(cx.instances.borrow_mut().insert(instance, llfn).is_none()); assert!(cx.instances.borrow_mut().insert(instance, llfn).is_none());

View file

@ -456,7 +456,7 @@ fn vec_slice_metadata(
let metadata = composite_type_metadata( let metadata = composite_type_metadata(
cx, cx,
slice_ptr_type, slice_ptr_type,
&slice_type_name[..], &slice_type_name,
unique_type_id, unique_type_id,
member_descriptions, member_descriptions,
NO_SCOPE_METADATA, NO_SCOPE_METADATA,
@ -579,7 +579,7 @@ fn trait_pointer_metadata(
composite_type_metadata( composite_type_metadata(
cx, cx,
trait_object_type.unwrap_or(trait_type), trait_object_type.unwrap_or(trait_type),
&trait_type_name[..], &trait_type_name,
unique_type_id, unique_type_id,
member_descriptions, member_descriptions,
containing_scope, containing_scope,
@ -2398,7 +2398,7 @@ fn set_members_of_composite_type(
let type_params = compute_type_parameters(cx, composite_type); let type_params = compute_type_parameters(cx, composite_type);
unsafe { unsafe {
let type_array = create_DIArray(DIB(cx), &member_metadata[..]); let type_array = create_DIArray(DIB(cx), &member_metadata);
llvm::LLVMRustDICompositeTypeReplaceArrays( llvm::LLVMRustDICompositeTypeReplaceArrays(
DIB(cx), DIB(cx),
composite_type_metadata, composite_type_metadata,
@ -2437,7 +2437,7 @@ fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> &'ll DIAr
}) })
.collect(); .collect();
return create_DIArray(DIB(cx), &template_params[..]); return create_DIArray(DIB(cx), &template_params);
} }
} }
return create_DIArray(DIB(cx), &[]); return create_DIArray(DIB(cx), &[]);

View file

@ -474,7 +474,7 @@ impl DebugInfoMethods<'tcx> for CodegenCx<'ll, 'tcx> {
vec![] vec![]
}; };
create_DIArray(DIB(cx), &template_params[..]) create_DIArray(DIB(cx), &template_params)
} }
fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> { fn get_parameter_names(cx: &CodegenCx<'_, '_>, generics: &ty::Generics) -> Vec<Symbol> {

View file

@ -677,11 +677,11 @@ impl<B: WriteBackendMethods> WorkItem<B> {
fn start_profiling<'a>(&self, cgcx: &'a CodegenContext<B>) -> TimingGuard<'a> { fn start_profiling<'a>(&self, cgcx: &'a CodegenContext<B>) -> TimingGuard<'a> {
match *self { match *self {
WorkItem::Optimize(ref m) => { WorkItem::Optimize(ref m) => {
cgcx.prof.generic_activity_with_arg("codegen_module_optimize", &m.name[..]) cgcx.prof.generic_activity_with_arg("codegen_module_optimize", &*m.name)
} }
WorkItem::CopyPostLtoArtifacts(ref m) => cgcx WorkItem::CopyPostLtoArtifacts(ref m) => cgcx
.prof .prof
.generic_activity_with_arg("codegen_copy_artifacts_from_incr_cache", &m.name[..]), .generic_activity_with_arg("codegen_copy_artifacts_from_incr_cache", &*m.name),
WorkItem::LTO(ref m) => { WorkItem::LTO(ref m) => {
cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", m.name()) cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", m.name())
} }

View file

@ -122,8 +122,8 @@ pub fn langcall(tcx: TyCtxt<'_>, span: Option<Span>, msg: &str, li: LangItem) ->
tcx.lang_items().require(li).unwrap_or_else(|s| { tcx.lang_items().require(li).unwrap_or_else(|s| {
let msg = format!("{} {}", msg, s); let msg = format!("{} {}", msg, s);
match span { match span {
Some(span) => tcx.sess.span_fatal(span, &msg[..]), Some(span) => tcx.sess.span_fatal(span, &msg),
None => tcx.sess.fatal(&msg[..]), None => tcx.sess.fatal(&msg),
} }
}) })
} }

View file

@ -103,7 +103,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.eval_fn_call( self.eval_fn_call(
fn_val, fn_val,
abi, abi,
&args[..], &args,
ret, ret,
match (cleanup, caller_can_unwind) { match (cleanup, caller_can_unwind) {
(Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup), (Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),

View file

@ -142,7 +142,7 @@ impl_stable_hash_via_hash!(Fingerprint);
impl<E: rustc_serialize::Encoder> Encodable<E> for Fingerprint { impl<E: rustc_serialize::Encoder> Encodable<E> for Fingerprint {
#[inline] #[inline]
fn encode(&self, s: &mut E) -> Result<(), E::Error> { fn encode(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_raw_bytes(&self.to_le_bytes()[..])?; s.emit_raw_bytes(&self.to_le_bytes())?;
Ok(()) Ok(())
} }
} }
@ -151,7 +151,7 @@ impl<D: rustc_serialize::Decoder> Decodable<D> for Fingerprint {
#[inline] #[inline]
fn decode(d: &mut D) -> Result<Self, D::Error> { fn decode(d: &mut D) -> Result<Self, D::Error> {
let mut bytes = [0u8; 16]; let mut bytes = [0u8; 16];
d.read_raw_bytes_into(&mut bytes[..])?; d.read_raw_bytes_into(&mut bytes)?;
Ok(Fingerprint::from_le_bytes(bytes)) Ok(Fingerprint::from_le_bytes(bytes))
} }
} }

View file

@ -649,7 +649,7 @@ impl Drop for VerboseTimingGuard<'_> {
fn drop(&mut self) { fn drop(&mut self) {
if let Some((start_time, start_rss, ref message)) = self.start_and_message { if let Some((start_time, start_rss, ref message)) = self.start_and_message {
let end_rss = get_resident_set_size(); let end_rss = get_resident_set_size();
print_time_passes_entry(&message[..], start_time.elapsed(), start_rss, end_rss); print_time_passes_entry(&message, start_time.elapsed(), start_rss, end_rss);
} }
} }
} }

View file

@ -46,7 +46,7 @@ impl SmallCStr {
#[inline] #[inline]
pub fn as_c_str(&self) -> &ffi::CStr { pub fn as_c_str(&self) -> &ffi::CStr {
unsafe { ffi::CStr::from_bytes_with_nul_unchecked(&self.data[..]) } unsafe { ffi::CStr::from_bytes_with_nul_unchecked(&self.data) }
} }
#[inline] #[inline]

View file

@ -253,7 +253,7 @@ fn generic_extension<'cx>(
for (i, lhs) in lhses.iter().enumerate() { for (i, lhs) in lhses.iter().enumerate() {
// try each arm's matchers // try each arm's matchers
let lhs_tt = match *lhs { let lhs_tt = match *lhs {
mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..], mbe::TokenTree::Delimited(_, ref delim) => &delim.tts,
_ => cx.span_bug(sp, "malformed macro lhs"), _ => cx.span_bug(sp, "malformed macro lhs"),
}; };
@ -353,7 +353,7 @@ fn generic_extension<'cx>(
for lhs in lhses { for lhs in lhses {
// try each arm's matchers // try each arm's matchers
let lhs_tt = match *lhs { let lhs_tt = match *lhs {
mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..], mbe::TokenTree::Delimited(_, ref delim) => &delim.tts,
_ => continue, _ => continue,
}; };
if let Success(_) = if let Success(_) =
@ -677,11 +677,11 @@ impl FirstSets {
first.replace_with(tt.clone()); first.replace_with(tt.clone());
} }
TokenTree::Delimited(span, ref delimited) => { TokenTree::Delimited(span, ref delimited) => {
build_recur(sets, &delimited.tts[..]); build_recur(sets, &delimited.tts);
first.replace_with(delimited.open_tt(span)); first.replace_with(delimited.open_tt(span));
} }
TokenTree::Sequence(sp, ref seq_rep) => { TokenTree::Sequence(sp, ref seq_rep) => {
let subfirst = build_recur(sets, &seq_rep.tts[..]); let subfirst = build_recur(sets, &seq_rep.tts);
match sets.first.entry(sp.entire()) { match sets.first.entry(sp.entire()) {
Entry::Vacant(vac) => { Entry::Vacant(vac) => {
@ -748,7 +748,7 @@ impl FirstSets {
let subfirst = match self.first.get(&sp.entire()) { let subfirst = match self.first.get(&sp.entire()) {
Some(&Some(ref subfirst)) => subfirst, Some(&Some(ref subfirst)) => subfirst,
Some(&None) => { Some(&None) => {
subfirst_owned = self.first(&seq_rep.tts[..]); subfirst_owned = self.first(&seq_rep.tts);
&subfirst_owned &subfirst_owned
} }
None => { None => {

View file

@ -175,12 +175,12 @@ pub(super) fn transcribe<'a>(
)); ));
} }
LockstepIterSize::Contradiction(ref msg) => { LockstepIterSize::Contradiction(msg) => {
// FIXME: this really ought to be caught at macro definition time... It // FIXME: this really ought to be caught at macro definition time... It
// happens when two meta-variables are used in the same repetition in a // happens when two meta-variables are used in the same repetition in a
// sequence, but they come from different sequence matchers and repeat // sequence, but they come from different sequence matchers and repeat
// different amounts. // different amounts.
return Err(cx.struct_span_err(seq.span(), &msg[..])); return Err(cx.struct_span_err(seq.span(), &msg));
} }
LockstepIterSize::Constraint(len, _) => { LockstepIterSize::Constraint(len, _) => {

View file

@ -659,7 +659,7 @@ where
} }
writeln!(text, ";").unwrap(); writeln!(text, ";").unwrap();
w.write_all(&text[..])?; w.write_all(&text)?;
text.clear(); text.clear();
} }
@ -684,7 +684,7 @@ where
} }
writeln!(text, ";").unwrap(); writeln!(text, ";").unwrap();
w.write_all(&text[..])?; w.write_all(&text)?;
text.clear(); text.clear();
} }

View file

@ -316,7 +316,7 @@ impl<'a> State<'a> {
} }
hir::TyKind::Tup(ref elts) => { hir::TyKind::Tup(ref elts) => {
self.popen(); self.popen();
self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(&ty)); self.commasep(Inconsistent, &elts, |s, ty| s.print_type(&ty));
if elts.len() == 1 { if elts.len() == 1 {
self.word(","); self.word(",");
} }
@ -1860,7 +1860,7 @@ impl<'a> State<'a> {
self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p)); self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p));
} }
} else { } else {
self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p)); self.commasep(Inconsistent, &elts, |s, p| s.print_pat(&p));
} }
self.pclose(); self.pclose();
} }
@ -1873,7 +1873,7 @@ impl<'a> State<'a> {
self.word_space("{"); self.word_space("{");
self.commasep_cmnt( self.commasep_cmnt(
Consistent, Consistent,
&fields[..], &fields,
|s, f| { |s, f| {
s.cbox(INDENT_UNIT); s.cbox(INDENT_UNIT);
if !f.is_shorthand { if !f.is_shorthand {
@ -1895,7 +1895,7 @@ impl<'a> State<'a> {
self.word("}"); self.word("}");
} }
PatKind::Or(ref pats) => { PatKind::Or(ref pats) => {
self.strsep("|", true, Inconsistent, &pats[..], |s, p| s.print_pat(&p)); self.strsep("|", true, Inconsistent, &pats, |s, p| s.print_pat(&p));
} }
PatKind::Tuple(ref elts, ddpos) => { PatKind::Tuple(ref elts, ddpos) => {
self.popen(); self.popen();
@ -1956,7 +1956,7 @@ impl<'a> State<'a> {
} }
PatKind::Slice(ref before, ref slice, ref after) => { PatKind::Slice(ref before, ref slice, ref after) => {
self.word("["); self.word("[");
self.commasep(Inconsistent, &before[..], |s, p| s.print_pat(&p)); self.commasep(Inconsistent, &before, |s, p| s.print_pat(&p));
if let Some(ref p) = *slice { if let Some(ref p) = *slice {
if !before.is_empty() { if !before.is_empty() {
self.word_space(","); self.word_space(",");
@ -1971,7 +1971,7 @@ impl<'a> State<'a> {
self.word_space(","); self.word_space(",");
} }
} }
self.commasep(Inconsistent, &after[..], |s, p| s.print_pat(&p)); self.commasep(Inconsistent, &after, |s, p| s.print_pat(&p));
self.word("]"); self.word("]");
} }
} }

View file

@ -52,6 +52,7 @@ use std::env;
use std::fs::{self, File}; use std::fs::{self, File};
use std::io::{BufWriter, Write}; use std::io::{BufWriter, Write};
#[allow(missing_docs)]
pub fn assert_dep_graph(tcx: TyCtxt<'_>) { pub fn assert_dep_graph(tcx: TyCtxt<'_>) {
tcx.dep_graph.with_ignore(|| { tcx.dep_graph.with_ignore(|| {
if tcx.sess.opts.debugging_opts.dump_dep_graph { if tcx.sess.opts.debugging_opts.dump_dep_graph {
@ -262,6 +263,7 @@ fn dump_graph(query: &DepGraphQuery) {
} }
} }
#[allow(missing_docs)]
pub struct GraphvizDepGraph<'q>(FxHashSet<&'q DepNode>, Vec<(&'q DepNode, &'q DepNode)>); pub struct GraphvizDepGraph<'q>(FxHashSet<&'q DepNode>, Vec<(&'q DepNode, &'q DepNode)>);
impl<'a, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> { impl<'a, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> {

View file

@ -29,6 +29,7 @@ use rustc_session::cgu_reuse_tracker::*;
use rustc_span::symbol::{sym, Symbol}; use rustc_span::symbol::{sym, Symbol};
use std::collections::BTreeSet; use std::collections::BTreeSet;
#[allow(missing_docs)]
pub fn assert_module_sources(tcx: TyCtxt<'_>) { pub fn assert_module_sources(tcx: TyCtxt<'_>) {
tcx.dep_graph.with_ignore(|| { tcx.dep_graph.with_ignore(|| {
if tcx.sess.opts.incremental.is_none() { if tcx.sess.opts.incremental.is_none() {

View file

@ -1,5 +1,6 @@
//! Support for serializing the dep-graph and reloading it. //! Support for serializing the dep-graph and reloading it.
#![deny(missing_docs)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(in_band_lifetimes)] #![feature(in_band_lifetimes)]
#![feature(let_else)] #![feature(let_else)]

View file

@ -133,21 +133,26 @@ const QUERY_CACHE_FILENAME: &str = "query-cache.bin";
// case-sensitive (as opposed to base64, for example). // case-sensitive (as opposed to base64, for example).
const INT_ENCODE_BASE: usize = base_n::CASE_INSENSITIVE; const INT_ENCODE_BASE: usize = base_n::CASE_INSENSITIVE;
/// Returns the path to a session's dependency graph.
pub fn dep_graph_path(sess: &Session) -> PathBuf { pub fn dep_graph_path(sess: &Session) -> PathBuf {
in_incr_comp_dir_sess(sess, DEP_GRAPH_FILENAME) in_incr_comp_dir_sess(sess, DEP_GRAPH_FILENAME)
} }
/// Returns the path to a session's staging dependency graph.
///
/// On the difference between dep-graph and staging dep-graph,
/// see `build_dep_graph`.
pub fn staging_dep_graph_path(sess: &Session) -> PathBuf { pub fn staging_dep_graph_path(sess: &Session) -> PathBuf {
in_incr_comp_dir_sess(sess, STAGING_DEP_GRAPH_FILENAME) in_incr_comp_dir_sess(sess, STAGING_DEP_GRAPH_FILENAME)
} }
pub fn work_products_path(sess: &Session) -> PathBuf { pub fn work_products_path(sess: &Session) -> PathBuf {
in_incr_comp_dir_sess(sess, WORK_PRODUCTS_FILENAME) in_incr_comp_dir_sess(sess, WORK_PRODUCTS_FILENAME)
} }
/// Returns the path to a session's query cache.
pub fn query_cache_path(sess: &Session) -> PathBuf { pub fn query_cache_path(sess: &Session) -> PathBuf {
in_incr_comp_dir_sess(sess, QUERY_CACHE_FILENAME) in_incr_comp_dir_sess(sess, QUERY_CACHE_FILENAME)
} }
/// Locks a given session directory.
pub fn lock_file_path(session_dir: &Path) -> PathBuf { pub fn lock_file_path(session_dir: &Path) -> PathBuf {
let crate_dir = session_dir.parent().unwrap(); let crate_dir = session_dir.parent().unwrap();
@ -166,23 +171,35 @@ pub fn lock_file_path(session_dir: &Path) -> PathBuf {
crate_dir.join(&directory_name[0..dash_indices[2]]).with_extension(&LOCK_FILE_EXT[1..]) crate_dir.join(&directory_name[0..dash_indices[2]]).with_extension(&LOCK_FILE_EXT[1..])
} }
/// Returns the path for a given filename within the incremental compilation directory
/// in the current session.
pub fn in_incr_comp_dir_sess(sess: &Session, file_name: &str) -> PathBuf { pub fn in_incr_comp_dir_sess(sess: &Session, file_name: &str) -> PathBuf {
in_incr_comp_dir(&sess.incr_comp_session_dir(), file_name) in_incr_comp_dir(&sess.incr_comp_session_dir(), file_name)
} }
/// Returns the path for a given filename within the incremental compilation directory,
/// not necessarily from the current session.
///
/// To ensure the file is part of the current session, use [`in_incr_comp_dir_sess`].
pub fn in_incr_comp_dir(incr_comp_session_dir: &Path, file_name: &str) -> PathBuf { pub fn in_incr_comp_dir(incr_comp_session_dir: &Path, file_name: &str) -> PathBuf {
incr_comp_session_dir.join(file_name) incr_comp_session_dir.join(file_name)
} }
/// Allocates the private session directory. The boolean in the Ok() result /// Allocates the private session directory.
/// indicates whether we should try loading a dep graph from the successfully ///
/// initialized directory, or not. /// If the result of this function is `Ok`, we have a valid incremental
/// The post-condition of this fn is that we have a valid incremental /// compilation session directory. A valid session
/// compilation session directory, if the result is `Ok`. A valid session
/// directory is one that contains a locked lock file. It may or may not contain /// directory is one that contains a locked lock file. It may or may not contain
/// a dep-graph and work products from a previous session. /// a dep-graph and work products from a previous session.
/// If the call fails, the fn may leave behind an invalid session directory. ///
/// This always attempts to load a dep-graph from the directory.
/// If loading fails for some reason, we fallback to a disabled `DepGraph`.
/// See [`rustc_interface::queries::dep_graph`].
///
/// If this function returns an error, it may leave behind an invalid session directory.
/// The garbage collection will take care of it. /// The garbage collection will take care of it.
///
/// [`rustc_interface::queries::dep_graph`]: ../../rustc_interface/struct.Queries.html#structfield.dep_graph
pub fn prepare_session_directory( pub fn prepare_session_directory(
sess: &Session, sess: &Session,
crate_name: &str, crate_name: &str,
@ -661,6 +678,7 @@ fn is_old_enough_to_be_collected(timestamp: SystemTime) -> bool {
timestamp < SystemTime::now() - Duration::from_secs(10) timestamp < SystemTime::now() - Duration::from_secs(10)
} }
/// Runs garbage collection for the current session.
pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> { pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
debug!("garbage_collect_session_directories() - begin"); debug!("garbage_collect_session_directories() - begin");

View file

@ -18,13 +18,24 @@ use super::work_product;
type WorkProductMap = FxHashMap<WorkProductId, WorkProduct>; type WorkProductMap = FxHashMap<WorkProductId, WorkProduct>;
#[derive(Debug)] #[derive(Debug)]
/// Represents the result of an attempt to load incremental compilation data.
pub enum LoadResult<T> { pub enum LoadResult<T> {
Ok { data: T }, /// Loading was successful.
Ok {
#[allow(missing_docs)]
data: T,
},
/// The file either didn't exist or was produced by an incompatible compiler version.
DataOutOfDate, DataOutOfDate,
Error { message: String }, /// An error occurred.
Error {
#[allow(missing_docs)]
message: String,
},
} }
impl<T: Default> LoadResult<T> { impl<T: Default> LoadResult<T> {
/// Accesses the data returned in [`LoadResult::Ok`].
pub fn open(self, sess: &Session) -> T { pub fn open(self, sess: &Session) -> T {
// Check for errors when using `-Zassert-incremental-state` // Check for errors when using `-Zassert-incremental-state`
match (sess.opts.assert_incr_state, &self) { match (sess.opts.assert_incr_state, &self) {
@ -99,6 +110,7 @@ pub enum MaybeAsync<T> {
} }
impl<T> MaybeAsync<LoadResult<T>> { impl<T> MaybeAsync<LoadResult<T>> {
/// Accesses the data returned in [`LoadResult::Ok`] in an asynchronous way if possible.
pub fn open(self) -> LoadResult<T> { pub fn open(self) -> LoadResult<T> {
match self { match self {
MaybeAsync::Sync(result) => result, MaybeAsync::Sync(result) => result,
@ -109,6 +121,7 @@ impl<T> MaybeAsync<LoadResult<T>> {
} }
} }
/// An asynchronous type for computing the dependency graph.
pub type DepGraphFuture = MaybeAsync<LoadResult<(SerializedDepGraph, WorkProductMap)>>; pub type DepGraphFuture = MaybeAsync<LoadResult<(SerializedDepGraph, WorkProductMap)>>;
/// Launch a thread and load the dependency graph in the background. /// Launch a thread and load the dependency graph in the background.
@ -151,7 +164,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
compilation session directory: {}", compilation session directory: {}",
e e
); );
sess.fatal(&msg[..]) sess.fatal(&msg)
}); });
for swp in work_products { for swp in work_products {

View file

@ -13,9 +13,13 @@ use super::file_format;
use super::fs::*; use super::fs::*;
use super::work_product; use super::work_product;
/// Save and dump the DepGraph. /// Saves and writes the [`DepGraph`] to the file system.
/// ///
/// No query must be invoked after this function. /// This function saves both the dep-graph and the query result cache,
/// and drops the result cache.
///
/// This function should only run after all queries have completed.
/// Trying to execute a query afterwards would attempt to read the result cache we just dropped.
pub fn save_dep_graph(tcx: TyCtxt<'_>) { pub fn save_dep_graph(tcx: TyCtxt<'_>) {
debug!("save_dep_graph()"); debug!("save_dep_graph()");
tcx.dep_graph.with_ignore(|| { tcx.dep_graph.with_ignore(|| {
@ -75,6 +79,7 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
}) })
} }
/// Saves the work product index.
pub fn save_work_product_index( pub fn save_work_product_index(
sess: &Session, sess: &Session,
dep_graph: &DepGraph, dep_graph: &DepGraph,
@ -139,6 +144,12 @@ fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut FileEncoder) -> FileEncodeR
tcx.sess.time("incr_comp_serialize_result_cache", || tcx.serialize_query_result_cache(encoder)) tcx.sess.time("incr_comp_serialize_result_cache", || tcx.serialize_query_result_cache(encoder))
} }
/// Builds the dependency graph.
///
/// This function creates the *staging dep-graph*. When the dep-graph is modified by a query
/// execution, the new dependency information is not kept in memory but directly
/// output to this file. `save_dep_graph` then finalizes the staging dep-graph
/// and moves it to the permanent dep-graph path
pub fn build_dep_graph( pub fn build_dep_graph(
sess: &Session, sess: &Session,
prev_graph: SerializedDepGraph, prev_graph: SerializedDepGraph,

View file

@ -1,4 +1,6 @@
//! This module contains files for saving intermediate work-products. //! Functions for saving and removing intermediate [work products].
//!
//! [work products]: WorkProduct
use crate::persist::fs::*; use crate::persist::fs::*;
use rustc_fs_util::link_or_copy; use rustc_fs_util::link_or_copy;
@ -7,6 +9,7 @@ use rustc_session::Session;
use std::fs as std_fs; use std::fs as std_fs;
use std::path::PathBuf; use std::path::PathBuf;
/// Copies a CGU work product to the incremental compilation directory, so next compilation can find and reuse it.
pub fn copy_cgu_workproduct_to_incr_comp_cache_dir( pub fn copy_cgu_workproduct_to_incr_comp_cache_dir(
sess: &Session, sess: &Session,
cgu_name: &str, cgu_name: &str,
@ -40,6 +43,7 @@ pub fn copy_cgu_workproduct_to_incr_comp_cache_dir(
Some((work_product_id, work_product)) Some((work_product_id, work_product))
} }
/// Removes files for a given work product.
pub fn delete_workproduct_files(sess: &Session, work_product: &WorkProduct) { pub fn delete_workproduct_files(sess: &Session, work_product: &WorkProduct) {
if let Some(ref file_name) = work_product.saved_file { if let Some(ref file_name) = work_product.saved_file {
let path = in_incr_comp_dir_sess(sess, file_name); let path = in_incr_comp_dir_sess(sess, file_name);

View file

@ -207,7 +207,7 @@ fn check_panic_str<'tcx>(
arg: &'tcx hir::Expr<'tcx>, arg: &'tcx hir::Expr<'tcx>,
fmt: &str, fmt: &str,
) { ) {
if !fmt.contains(&['{', '}'][..]) { if !fmt.contains(&['{', '}']) {
// No brace, no problem. // No brace, no problem.
return; return;
} }

View file

@ -132,7 +132,7 @@ impl ItemLikeVisitor<'tcx> for Collector<'tcx> {
if let Some(modifiers) = item.value_str() { if let Some(modifiers) = item.value_str() {
let span = item.name_value_literal_span().unwrap(); let span = item.name_value_literal_span().unwrap();
for modifier in modifiers.as_str().split(',') { for modifier in modifiers.as_str().split(',') {
let (modifier, value) = match modifier.strip_prefix(&['+', '-'][..]) { let (modifier, value) = match modifier.strip_prefix(&['+', '-']) {
Some(m) => (m, modifier.starts_with('+')), Some(m) => (m, modifier.starts_with('+')),
None => { None => {
sess.span_err( sess.span_err(

View file

@ -2119,7 +2119,7 @@ impl EncodedMetadata {
#[inline] #[inline]
pub fn raw_data(&self) -> &[u8] { pub fn raw_data(&self) -> &[u8] {
&self.raw_data[..] &self.raw_data
} }
} }

View file

@ -530,6 +530,6 @@ impl CodegenUnitNameBuilder<'tcx> {
write!(cgu_name, ".{}", special_suffix).unwrap(); write!(cgu_name, ".{}", special_suffix).unwrap();
} }
Symbol::intern(&cgu_name[..]) Symbol::intern(&cgu_name)
} }
} }

View file

@ -342,7 +342,7 @@ impl<'tcx> TerminatorKind<'tcx> {
| InlineAsm { destination: Some(ref t), cleanup: Some(ref u), .. } => { | InlineAsm { destination: Some(ref t), cleanup: Some(ref u), .. } => {
Some(t).into_iter().chain(slice::from_ref(u)) Some(t).into_iter().chain(slice::from_ref(u))
} }
SwitchInt { ref targets, .. } => None.into_iter().chain(&targets.targets[..]), SwitchInt { ref targets, .. } => None.into_iter().chain(&targets.targets),
FalseEdge { ref real_target, ref imaginary_target } => { FalseEdge { ref real_target, ref imaginary_target } => {
Some(real_target).into_iter().chain(slice::from_ref(imaginary_target)) Some(real_target).into_iter().chain(slice::from_ref(imaginary_target))
} }
@ -380,7 +380,7 @@ impl<'tcx> TerminatorKind<'tcx> {
| InlineAsm { destination: Some(ref mut t), cleanup: Some(ref mut u), .. } => { | InlineAsm { destination: Some(ref mut t), cleanup: Some(ref mut u), .. } => {
Some(t).into_iter().chain(slice::from_mut(u)) Some(t).into_iter().chain(slice::from_mut(u))
} }
SwitchInt { ref mut targets, .. } => None.into_iter().chain(&mut targets.targets[..]), SwitchInt { ref mut targets, .. } => None.into_iter().chain(&mut targets.targets),
FalseEdge { ref mut real_target, ref mut imaginary_target } => { FalseEdge { ref mut real_target, ref mut imaginary_target } => {
Some(real_target).into_iter().chain(slice::from_mut(imaginary_target)) Some(real_target).into_iter().chain(slice::from_mut(imaginary_target))
} }

View file

@ -587,18 +587,18 @@ impl<'tcx, N> ImplSource<'tcx, N> {
pub fn borrow_nested_obligations(&self) -> &[N] { pub fn borrow_nested_obligations(&self) -> &[N] {
match &self { match &self {
ImplSource::UserDefined(i) => &i.nested[..], ImplSource::UserDefined(i) => &i.nested[..],
ImplSource::Param(n, _) => &n[..], ImplSource::Param(n, _) => &n,
ImplSource::Builtin(i) => &i.nested[..], ImplSource::Builtin(i) => &i.nested,
ImplSource::AutoImpl(d) => &d.nested[..], ImplSource::AutoImpl(d) => &d.nested,
ImplSource::Closure(c) => &c.nested[..], ImplSource::Closure(c) => &c.nested,
ImplSource::Generator(c) => &c.nested[..], ImplSource::Generator(c) => &c.nested,
ImplSource::Object(d) => &d.nested[..], ImplSource::Object(d) => &d.nested,
ImplSource::FnPointer(d) => &d.nested[..], ImplSource::FnPointer(d) => &d.nested,
ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData) ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData)
| ImplSource::Pointee(ImplSourcePointeeData) | ImplSource::Pointee(ImplSourcePointeeData)
| ImplSource::ConstDrop(ImplSourceConstDropData) => &[], | ImplSource::ConstDrop(ImplSourceConstDropData) => &[],
ImplSource::TraitAlias(d) => &d.nested[..], ImplSource::TraitAlias(d) => &d.nested,
ImplSource::TraitUpcasting(d) => &d.nested[..], ImplSource::TraitUpcasting(d) => &d.nested,
} }
} }

View file

@ -306,7 +306,7 @@ impl<'a> Parser<'a> {
} }
} }
let expect = tokens_to_string(&expected[..]); let expect = tokens_to_string(&expected);
let actual = super::token_descr(&self.token); let actual = super::token_descr(&self.token);
let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
let short_expect = if expected.len() > 6 { let short_expect = if expected.len() > 6 {
@ -909,7 +909,7 @@ impl<'a> Parser<'a> {
// So far we have parsed `foo<bar<`, consume the rest of the type args. // So far we have parsed `foo<bar<`, consume the rest of the type args.
let modifiers = let modifiers =
[(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)]; [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
self.consume_tts(1, &modifiers[..]); self.consume_tts(1, &modifiers);
if !&[token::OpenDelim(token::Paren), token::ModSep] if !&[token::OpenDelim(token::Paren), token::ModSep]
.contains(&self.token.kind) .contains(&self.token.kind)
@ -1001,7 +1001,7 @@ impl<'a> Parser<'a> {
// Consume the fn call arguments. // Consume the fn call arguments.
let modifiers = let modifiers =
[(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)]; [(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
self.consume_tts(1, &modifiers[..]); self.consume_tts(1, &modifiers);
if self.token.kind == token::Eof { if self.token.kind == token::Eof {
// Not entirely sure that what we consumed were fn arguments, rollback. // Not entirely sure that what we consumed were fn arguments, rollback.

View file

@ -158,7 +158,7 @@ impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
// Wrap in a scope so we can borrow `data`. // Wrap in a scope so we can borrow `data`.
let footer: Footer = { let footer: Footer = {
let mut decoder = opaque::Decoder::new(&data[..], start_pos); let mut decoder = opaque::Decoder::new(&data, start_pos);
// Decode the *position* of the footer, which can be found in the // Decode the *position* of the footer, which can be found in the
// last 8 bytes of the file. // last 8 bytes of the file.

View file

@ -1735,7 +1735,7 @@ fn parse_native_lib_modifiers(
) -> (NativeLibKind, Option<bool>) { ) -> (NativeLibKind, Option<bool>) {
let mut verbatim = None; let mut verbatim = None;
for modifier in modifiers.split(',') { for modifier in modifiers.split(',') {
let (modifier, value) = match modifier.strip_prefix(&['+', '-'][..]) { let (modifier, value) = match modifier.strip_prefix(&['+', '-']) {
Some(m) => (m, modifier.starts_with('+')), Some(m) => (m, modifier.starts_with('+')),
None => early_error( None => early_error(
error_format, error_format,
@ -2027,7 +2027,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let unparsed_crate_types = matches.opt_strs("crate-type"); let unparsed_crate_types = matches.opt_strs("crate-type");
let crate_types = parse_crate_types_from_list(unparsed_crate_types) let crate_types = parse_crate_types_from_list(unparsed_crate_types)
.unwrap_or_else(|e| early_error(error_format, &e[..])); .unwrap_or_else(|e| early_error(error_format, &e));
let mut debugging_opts = DebuggingOptions::build(matches, error_format); let mut debugging_opts = DebuggingOptions::build(matches, error_format);
let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format); let (lint_opts, describe_lints, lint_cap) = get_cmd_lint_options(matches, error_format);
@ -2151,7 +2151,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let mut search_paths = vec![]; let mut search_paths = vec![];
for s in &matches.opt_strs("L") { for s in &matches.opt_strs("L") {
search_paths.push(SearchPath::from_cli_opt(&s[..], error_format)); search_paths.push(SearchPath::from_cli_opt(&s, error_format));
} }
let libs = parse_libs(matches, error_format); let libs = parse_libs(matches, error_format);

View file

@ -1383,7 +1383,7 @@ impl<S: Encoder> Encodable<S> for SourceFile {
// Encode the first element. // Encode the first element.
lines[0].encode(s)?; lines[0].encode(s)?;
let diff_iter = lines[..].array_windows().map(|&[fst, snd]| snd - fst); let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
match bytes_per_diff { match bytes_per_diff {
1 => { 1 => {
@ -1506,7 +1506,7 @@ impl SourceFile {
assert!(end_pos <= u32::MAX as usize); assert!(end_pos <= u32::MAX as usize);
let (lines, multibyte_chars, non_narrow_chars) = let (lines, multibyte_chars, non_narrow_chars) =
analyze_source_file::analyze_source_file(&src[..], start_pos); analyze_source_file::analyze_source_file(&src, start_pos);
SourceFile { SourceFile {
name, name,

View file

@ -231,7 +231,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
if let Ok(Some(command)) = if let Ok(Some(command)) =
OnUnimplementedDirective::of_item(self.tcx, trait_ref.def_id, def_id) OnUnimplementedDirective::of_item(self.tcx, trait_ref.def_id, def_id)
{ {
command.evaluate(self.tcx, trait_ref, &flags[..]) command.evaluate(self.tcx, trait_ref, &flags)
} else { } else {
OnUnimplementedNote::default() OnUnimplementedNote::default()
} }

View file

@ -804,7 +804,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
} else if let ObligationCauseCode::BindingObligation(_, _) } else if let ObligationCauseCode::BindingObligation(_, _)
| ObligationCauseCode::ItemObligation(_) = &*code | ObligationCauseCode::ItemObligation(_) = &*code
{ {
try_borrowing(*poly_trait_ref, &never_suggest_borrow[..]) try_borrowing(*poly_trait_ref, &never_suggest_borrow)
} else { } else {
false false
} }
@ -1132,7 +1132,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
<https://doc.rust-lang.org/book/ch17-02-trait-objects.html\ <https://doc.rust-lang.org/book/ch17-02-trait-objects.html\
#using-trait-objects-that-allow-for-values-of-different-types>"; #using-trait-objects-that-allow-for-values-of-different-types>";
let has_dyn = snippet.split_whitespace().next().map_or(false, |s| s == "dyn"); let has_dyn = snippet.split_whitespace().next().map_or(false, |s| s == "dyn");
let trait_obj = if has_dyn { &snippet[4..] } else { &snippet[..] }; let trait_obj = if has_dyn { &snippet[4..] } else { &snippet };
if only_never_return { if only_never_return {
// No return paths, probably using `panic!()` or similar. // No return paths, probably using `panic!()` or similar.
// Suggest `-> T`, `-> impl Trait`, and if `Trait` is object safe, `-> Box<dyn Trait>`. // Suggest `-> T`, `-> impl Trait`, and if `Trait` is object safe, `-> Box<dyn Trait>`.

View file

@ -1350,7 +1350,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
tcx, tcx,
span, span,
item.trait_ref().def_id(), item.trait_ref().def_id(),
&object_safety_violations[..], &object_safety_violations,
) )
.emit(); .emit();
return tcx.ty_error(); return tcx.ty_error();

View file

@ -496,7 +496,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
call_expr.span, call_expr.span,
call_expr, call_expr,
fn_sig.inputs(), fn_sig.inputs(),
&expected_arg_tys[..], &expected_arg_tys,
arg_exprs, arg_exprs,
fn_sig.c_variadic, fn_sig.c_variadic,
TupleArgumentsFlag::DontTupleArguments, TupleArgumentsFlag::DontTupleArguments,

View file

@ -1436,7 +1436,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
<dyn AstConv<'_>>::create_substs_for_generic_args( <dyn AstConv<'_>>::create_substs_for_generic_args(
tcx, tcx,
def_id, def_id,
&[][..], &[],
has_self, has_self,
self_ty, self_ty,
&arg_count, &arg_count,

View file

@ -54,13 +54,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let err_inputs = match tuple_arguments { let err_inputs = match tuple_arguments {
DontTupleArguments => err_inputs, DontTupleArguments => err_inputs,
TupleArguments => vec![self.tcx.intern_tup(&err_inputs[..])], TupleArguments => vec![self.tcx.intern_tup(&err_inputs)],
}; };
self.check_argument_types( self.check_argument_types(
sp, sp,
expr, expr,
&err_inputs[..], &err_inputs,
&[], &[],
args_no_rcvr, args_no_rcvr,
false, false,
@ -324,7 +324,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.point_at_type_arg_instead_of_call_if_possible(errors, expr); self.point_at_type_arg_instead_of_call_if_possible(errors, expr);
self.point_at_arg_instead_of_call_if_possible( self.point_at_arg_instead_of_call_if_possible(
errors, errors,
&final_arg_types[..], &final_arg_types,
expr, expr,
sp, sp,
&args, &args,

View file

@ -1372,7 +1372,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
if applicable_candidates.len() > 1 { if applicable_candidates.len() > 1 {
if let Some(pick) = if let Some(pick) =
self.collapse_candidates_to_trait_pick(self_ty, &applicable_candidates[..]) self.collapse_candidates_to_trait_pick(self_ty, &applicable_candidates)
{ {
return Some(Ok(pick)); return Some(Ok(pick));
} }

View file

@ -1344,7 +1344,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if candidates.len() > limit { if candidates.len() > limit {
msg.push_str(&format!("\nand {} others", candidates.len() - limit)); msg.push_str(&format!("\nand {} others", candidates.len() - limit));
} }
err.note(&msg[..]); err.note(&msg);
} }
} }

View file

@ -2998,9 +2998,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
) )
.emit(); .emit();
InlineAttr::None InlineAttr::None
} else if list_contains_name(&items[..], sym::always) { } else if list_contains_name(&items, sym::always) {
InlineAttr::Always InlineAttr::Always
} else if list_contains_name(&items[..], sym::never) { } else if list_contains_name(&items, sym::never) {
InlineAttr::Never InlineAttr::Never
} else { } else {
struct_span_err!( struct_span_err!(
@ -3034,9 +3034,9 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
if items.len() != 1 { if items.len() != 1 {
err(attr.span, "expected one argument"); err(attr.span, "expected one argument");
OptimizeAttr::None OptimizeAttr::None
} else if list_contains_name(&items[..], sym::size) { } else if list_contains_name(&items, sym::size) {
OptimizeAttr::Size OptimizeAttr::Size
} else if list_contains_name(&items[..], sym::speed) { } else if list_contains_name(&items, sym::speed) {
OptimizeAttr::Speed OptimizeAttr::Speed
} else { } else {
err(items[0].span(), "invalid argument"); err(items[0].span(), "invalid argument");

View file

@ -2107,10 +2107,11 @@ impl<K, V> BTreeMap<K, V> {
/// ``` /// ```
/// use std::collections::BTreeMap; /// use std::collections::BTreeMap;
/// ///
/// let mut map = BTreeMap::new(); /// let mut map = BTreeMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// // add 10 to the value if the key isn't "a" /// // add 10 to the value if the key isn't "a"
/// for (key, value) in map.iter_mut() { /// for (key, value) in map.iter_mut() {

View file

@ -2199,7 +2199,7 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
/// Clones and appends all elements in a slice to the `Vec`. /// Clones and appends all elements in a slice to the `Vec`.
/// ///
/// Iterates over the slice `other`, clones each element, and then appends /// Iterates over the slice `other`, clones each element, and then appends
/// it to this `Vec`. The `other` vector is traversed in-order. /// it to this `Vec`. The `other` slice is traversed in-order.
/// ///
/// Note that this function is same as [`extend`] except that it is /// Note that this function is same as [`extend`] except that it is
/// specialized to work with slices instead. If and when Rust gets /// specialized to work with slices instead. If and when Rust gets

View file

@ -2,7 +2,7 @@ use crate::future::Future;
use crate::pin::Pin; use crate::pin::Pin;
use crate::task::{Context, Poll}; use crate::task::{Context, Poll};
/// Creates a future that is immediately ready with a value. /// A future that is immediately ready with a value.
/// ///
/// This `struct` is created by [`ready()`]. See its /// This `struct` is created by [`ready()`]. See its
/// documentation for more. /// documentation for more.

View file

@ -2070,8 +2070,8 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: us
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {} const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {}
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
// SAFETY: runtime debug-assertions are a best-effort basis; it's fine to // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
// not do them during compile time // Therefore, compiletime_check and runtime_check are observably equivalent.
unsafe { unsafe {
const_eval_select((src, dst, count), compiletime_check, runtime_check); const_eval_select((src, dst, count), compiletime_check, runtime_check);
} }
@ -2161,8 +2161,8 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {} const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {}
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
// SAFETY: runtime debug-assertions are a best-effort basis; it's fine to // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
// not do them during compile time // Therefore, compiletime_check and runtime_check are observably equivalent.
unsafe { unsafe {
const_eval_select((src, dst), compiletime_check, runtime_check); const_eval_select((src, dst), compiletime_check, runtime_check);
} }
@ -2273,19 +2273,40 @@ pub unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
/// ///
/// # Safety /// # Safety
/// ///
/// This intrinsic allows breaking [referential transparency] in `const fn` /// The two functions must behave observably equivalent. Safe code in other
/// and is therefore `unsafe`. /// crates may assume that calling a `const fn` at compile-time and at run-time
/// produces the same result. A function that produces a different result when
/// evaluated at run-time, or has any other observable side-effects, is
/// *unsound*.
/// ///
/// Code that uses this intrinsic must be extremely careful to ensure that /// Here is an example of how this could cause a problem:
/// `const fn`s remain referentially-transparent independently of when they /// ```no_run
/// are evaluated. /// #![feature(const_eval_select)]
/// use std::hint::unreachable_unchecked;
/// use std::intrinsics::const_eval_select;
/// ///
/// The Rust compiler assumes that it is sound to replace a call to a `const /// // Crate A
/// fn` with the result produced by evaluating it at compile-time. If /// pub const fn inconsistent() -> i32 {
/// evaluating the function at run-time were to produce a different result, /// fn runtime() -> i32 { 1 }
/// or have any other observable side-effects, the behavior is undefined. /// const fn compiletime() -> i32 { 2 }
/// ///
/// [referential transparency]: https://en.wikipedia.org/wiki/Referential_transparency /// unsafe {
// // ⚠ This code violates the required equivalence of `compiletime`
/// // and `runtime`.
/// const_eval_select((), compiletime, runtime)
/// }
/// }
///
/// // Crate B
/// const X: i32 = inconsistent();
/// let x = inconsistent();
/// if x != X { unsafe { unreachable_unchecked(); }}
/// ```
///
/// This code causes Undefined Behavior when being run, since the
/// `unreachable_unchecked` is actually being reached. The bug is in *crate A*,
/// which violates the principle that a `const fn` must behave the same at
/// compile-time and at run-time. The unsafe code in crate B is fine.
#[unstable( #[unstable(
feature = "const_eval_select", feature = "const_eval_select",
issue = "none", issue = "none",

View file

@ -628,7 +628,7 @@ macro_rules! saturating_int_impl {
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "saturating_int_impl", issue = "87920")] #[unstable(feature = "saturating_int_impl", issue = "87920")]
#[rustc_const_stable(feature = "const_reverse_bits", since = "1.37.0")] #[rustc_const_unstable(feature = "saturating_int_impl", issue = "87920")]
#[must_use = "this returns the result of the operation, \ #[must_use = "this returns the result of the operation, \
without modifying the original"] without modifying the original"]
pub const fn reverse_bits(self) -> Self { pub const fn reverse_bits(self) -> Self {

View file

@ -2223,7 +2223,7 @@ macro_rules! uint_impl {
/// ``` /// ```
#[unstable(feature = "wrapping_next_power_of_two", issue = "32463", #[unstable(feature = "wrapping_next_power_of_two", issue = "32463",
reason = "needs decision on wrapping behaviour")] reason = "needs decision on wrapping behaviour")]
#[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] #[rustc_const_unstable(feature = "wrapping_next_power_of_two", issue = "32463")]
#[must_use = "this returns the result of the operation, \ #[must_use = "this returns the result of the operation, \
without modifying the original"] without modifying the original"]
pub const fn wrapping_next_power_of_two(self) -> Self { pub const fn wrapping_next_power_of_two(self) -> Self {

View file

@ -149,8 +149,8 @@ const fn debug_check_data_len<T>(data: *const T, len: usize) {
// it is not required for safety (the safety must be guatanteed by // it is not required for safety (the safety must be guatanteed by
// the `from_raw_parts[_mut]` caller). // the `from_raw_parts[_mut]` caller).
// //
// Since the checks are not required, we ignore them in CTFE as they can't // As per our safety precondition, we may assume that assertion above never fails.
// be done there (alignment does not make much sense there). // Therefore, noop and rt_check are observably equivalent.
unsafe { unsafe {
crate::intrinsics::const_eval_select((data,), noop, rt_check); crate::intrinsics::const_eval_select((data,), noop, rt_check);
} }

View file

@ -114,9 +114,9 @@
//! # Laziness //! # Laziness
//! //!
//! Streams are *lazy*. This means that just creating a stream doesn't _do_ a //! Streams are *lazy*. This means that just creating a stream doesn't _do_ a
//! whole lot. Nothing really happens until you call `next`. This is sometimes a //! whole lot. Nothing really happens until you call `poll_next`. This is
//! source of confusion when creating a stream solely for its side effects. The //! sometimes a source of confusion when creating a stream solely for its side
//! compiler will warn us about this kind of behavior: //! effects. The compiler will warn us about this kind of behavior:
//! //!
//! ```text //! ```text
//! warning: unused result that must be used: streams do nothing unless polled //! warning: unused result that must be used: streams do nothing unless polled

View file

@ -334,10 +334,11 @@ impl<K, V, S> HashMap<K, V, S> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// for key in map.keys() { /// for key in map.keys() {
/// println!("{}", key); /// println!("{}", key);
@ -356,10 +357,11 @@ impl<K, V, S> HashMap<K, V, S> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// for val in map.values() { /// for val in map.values() {
/// println!("{}", val); /// println!("{}", val);
@ -378,11 +380,11 @@ impl<K, V, S> HashMap<K, V, S> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let mut map = HashMap::from([
/// /// ("a", 1),
/// map.insert("a", 1); /// ("b", 2),
/// map.insert("b", 2); /// ("c", 3),
/// map.insert("c", 3); /// ]);
/// ///
/// for val in map.values_mut() { /// for val in map.values_mut() {
/// *val = *val + 10; /// *val = *val + 10;
@ -405,10 +407,11 @@ impl<K, V, S> HashMap<K, V, S> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// for (key, val) in map.iter() { /// for (key, val) in map.iter() {
/// println!("key: {} val: {}", key, val); /// println!("key: {} val: {}", key, val);
@ -428,10 +431,11 @@ impl<K, V, S> HashMap<K, V, S> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let mut map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// // Update all values /// // Update all values
/// for (_, val) in map.iter_mut() { /// for (_, val) in map.iter_mut() {
@ -966,10 +970,11 @@ where
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// let mut vec: Vec<&str> = map.into_keys().collect(); /// let mut vec: Vec<&str> = map.into_keys().collect();
/// // The `IntoKeys` iterator produces keys in arbitrary order, so the /// // The `IntoKeys` iterator produces keys in arbitrary order, so the
@ -992,10 +997,11 @@ where
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// let mut vec: Vec<i32> = map.into_values().collect(); /// let mut vec: Vec<i32> = map.into_values().collect();
/// // The `IntoValues` iterator produces values in arbitrary order, so /// // The `IntoValues` iterator produces values in arbitrary order, so
@ -1202,8 +1208,9 @@ where
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter = map.iter(); /// let iter = map.iter();
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -1239,8 +1246,9 @@ impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let mut map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter = map.iter_mut(); /// let iter = map.iter_mut();
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -1269,8 +1277,9 @@ impl<'a, K, V> IterMut<'a, K, V> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter = map.into_iter(); /// let iter = map.into_iter();
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -1298,8 +1307,9 @@ impl<K, V> IntoIter<K, V> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter_keys = map.keys(); /// let iter_keys = map.keys();
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -1335,8 +1345,9 @@ impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter_values = map.values(); /// let iter_values = map.values();
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -1372,8 +1383,9 @@ impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let mut map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter = map.drain(); /// let iter = map.drain();
/// ``` /// ```
#[stable(feature = "drain", since = "1.6.0")] #[stable(feature = "drain", since = "1.6.0")]
@ -1402,8 +1414,9 @@ impl<'a, K, V> Drain<'a, K, V> {
/// ///
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let mut map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter = map.drain_filter(|_k, v| *v % 2 == 0); /// let iter = map.drain_filter(|_k, v| *v % 2 == 0);
/// ``` /// ```
#[unstable(feature = "hash_drain_filter", issue = "59618")] #[unstable(feature = "hash_drain_filter", issue = "59618")]
@ -1426,8 +1439,9 @@ where
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let mut map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter_values = map.values_mut(); /// let iter_values = map.values_mut();
/// ``` /// ```
#[stable(feature = "map_values_mut", since = "1.10.0")] #[stable(feature = "map_values_mut", since = "1.10.0")]
@ -1447,8 +1461,9 @@ pub struct ValuesMut<'a, K: 'a, V: 'a> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter_keys = map.into_keys(); /// let iter_keys = map.into_keys();
/// ``` /// ```
#[stable(feature = "map_into_keys_values", since = "1.54.0")] #[stable(feature = "map_into_keys_values", since = "1.54.0")]
@ -1468,8 +1483,9 @@ pub struct IntoKeys<K, V> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// ]);
/// let iter_keys = map.into_values(); /// let iter_keys = map.into_values();
/// ``` /// ```
#[stable(feature = "map_into_keys_values", since = "1.54.0")] #[stable(feature = "map_into_keys_values", since = "1.54.0")]
@ -2004,10 +2020,11 @@ impl<K, V, S> IntoIterator for HashMap<K, V, S> {
/// ``` /// ```
/// use std::collections::HashMap; /// use std::collections::HashMap;
/// ///
/// let mut map = HashMap::new(); /// let map = HashMap::from([
/// map.insert("a", 1); /// ("a", 1),
/// map.insert("b", 2); /// ("b", 2),
/// map.insert("c", 3); /// ("c", 3),
/// ]);
/// ///
/// // Not possible with .iter() /// // Not possible with .iter()
/// let vec: Vec<(&str, i32)> = map.into_iter().collect(); /// let vec: Vec<(&str, i32)> = map.into_iter().collect();

View file

@ -0,0 +1,36 @@
../coverage/issue-85461.rs:
1| |// Regression test for #85461: MSVC sometimes fail to link with dead code and #[inline(always)]
2| |
3| |extern crate inline_always_with_dead_code;
4| |
5| |use inline_always_with_dead_code::{bar, baz};
6| |
7| 1|fn main() {
8| 1| bar::call_me();
9| 1| baz::call_me();
10| 1|}
../coverage/lib/inline_always_with_dead_code.rs:
1| |// compile-flags: -Zinstrument-coverage -Ccodegen-units=4 -Copt-level=0
2| |
3| |#![allow(dead_code)]
4| |
5| |mod foo {
6| | #[inline(always)]
7| 2| pub fn called() { }
8| |
9| 0| fn uncalled() { }
10| |}
11| |
12| |pub mod bar {
13| 1| pub fn call_me() {
14| 1| super::foo::called();
15| 1| }
16| |}
17| |
18| |pub mod baz {
19| 1| pub fn call_me() {
20| 1| super::foo::called();
21| 1| }
22| |}

View file

@ -0,0 +1,10 @@
// Regression test for #85461: MSVC sometimes fail to link with dead code and #[inline(always)]
extern crate inline_always_with_dead_code;
use inline_always_with_dead_code::{bar, baz};
fn main() {
bar::call_me();
baz::call_me();
}

View file

@ -0,0 +1,22 @@
// compile-flags: -Zinstrument-coverage -Ccodegen-units=4 -Copt-level=0
#![allow(dead_code)]
mod foo {
#[inline(always)]
pub fn called() { }
fn uncalled() { }
}
pub mod bar {
pub fn call_me() {
super::foo::called();
}
}
pub mod baz {
pub fn call_me() {
super::foo::called();
}
}

View file

@ -0,0 +1,8 @@
fn main() {
let _ = format!(concat!("{0}𝖳𝖾𝗌𝗍{"), i);
//~^ ERROR: invalid format string: expected `'}'` but string was terminated
//~| NOTE: if you intended to print `{`, you can escape it using `{{`
//~| NOTE: in this expansion of concat!
//~| NOTE: in this expansion of concat!
//~| NOTE: expected `'}'` in format string
}

View file

@ -0,0 +1,11 @@
error: invalid format string: expected `'}'` but string was terminated
--> $DIR/issue-91556.rs:2:19
|
LL | let _ = format!(concat!("{0}𝖳𝖾𝗌𝗍{"), i);
| ^^^^^^^^^^^^^^^^^^^ expected `'}'` in format string
|
= note: if you intended to print `{`, you can escape it using `{{`
= note: this error originates in the macro `concat` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to previous error

View file

@ -0,0 +1,27 @@
// compile-flags: -Zinstrument-coverage -Ccodegen-units=4 --crate-type dylib -Copt-level=0
// build-pass
// needs-profiler-support
// Regression test for #85461 where MSVC sometimes fails to link instrument-coverage binaries
// with dead code and #[inline(always)].
#![allow(dead_code)]
mod foo {
#[inline(always)]
pub fn called() { }
fn uncalled() { }
}
pub mod bar {
pub fn call_me() {
super::foo::called();
}
}
pub mod baz {
pub fn call_me() {
super::foo::called();
}
}