Merge pull request #4270 from rust-lang/rustup-2025-04-12

Automatic Rustup

Commit 1f8cb690fc: 358 changed files with 3355 additions and 1746 deletions
.github/ISSUE_TEMPLATE/tracking_issue.md (vendored): 5 changes
@@ -41,7 +41,10 @@ for larger features an implementation could be broken up into multiple PRs.
- [ ] Implement the RFC (cc @rust-lang/XXX -- can anyone write up mentoring
  instructions?)
- [ ] Adjust documentation ([see instructions on rustc-dev-guide][doc-guide])
- [ ] Formatting for new syntax has been added to the [Style Guide] ([nightly-style-procedure])
- [ ] Style updates for any new syntax ([nightly-style-procedure])
    - [ ] Style team decision on new formatting
    - [ ] Formatting for new syntax has been added to the [Style Guide]
    - [ ] (non-blocking) Formatting has been implemented in `rustfmt`
- [ ] Stabilization PR ([see instructions on rustc-dev-guide][stabilization-guide])

[stabilization-guide]: https://rustc-dev-guide.rust-lang.org/stabilization_guide.html#stabilization-pr
Cargo.lock: 16 changes
@@ -3217,17 +3217,6 @@ dependencies = [
"tikv-jemalloc-sys",
]

[[package]]
name = "rustc-rayon"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2cd9fb077db982d7ceb42a90471e5a69a990b58f71e06f0d8340bb2cf35eb751"
dependencies = [
"either",
"indexmap",
"rustc-rayon-core",
]

[[package]]
name = "rustc-rayon-core"
version = "0.5.0"

@@ -3599,7 +3588,7 @@ dependencies = [
"parking_lot",
"portable-atomic",
"rustc-hash 2.1.1",
"rustc-rayon",
"rustc-rayon-core",
"rustc-stable-hash",
"rustc_arena",
"rustc_graphviz",

@@ -3945,7 +3934,6 @@ dependencies = [
name = "rustc_interface"
version = "0.0.0"
dependencies = [
"rustc-rayon",
"rustc-rayon-core",
"rustc_abi",
"rustc_ast",

@@ -4409,7 +4397,6 @@ dependencies = [
"rustc_feature",
"rustc_fluent_macro",
"rustc_hir",
"rustc_index",
"rustc_macros",
"rustc_metadata",
"rustc_middle",

@@ -4456,6 +4443,7 @@ dependencies = [
"bitflags",
"getopts",
"libc",
"rand 0.9.0",
"rustc_abi",
"rustc_ast",
"rustc_data_structures",
@@ -13,12 +13,12 @@ pub mod typetree;
#[derive(Debug, Clone, Encodable, Decodable, HashStable_Generic)]
pub struct StrippedCfgItem<ModId = DefId> {
pub parent_module: ModId,
pub name: Ident,
pub ident: Ident,
pub cfg: MetaItem,
}

impl<ModId> StrippedCfgItem<ModId> {
pub fn map_mod_id<New>(self, f: impl FnOnce(ModId) -> New) -> StrippedCfgItem<New> {
StrippedCfgItem { parent_module: f(self.parent_module), name: self.name, cfg: self.cfg }
StrippedCfgItem { parent_module: f(self.parent_module), ident: self.ident, cfg: self.cfg }
}
}
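The hunk above renames the `name` field of `StrippedCfgItem` to `ident` and threads the rename through `map_mod_id`. A minimal standalone sketch of the same "map one field, move the rest through" pattern follows; the types here are plain placeholders, not the rustc ones.

```rust
// Standalone sketch of the `map_mod_id` pattern shown above. `String` stands in
// for rustc's `Ident` and `MetaItem`; only the shape of the API is illustrated.
#[derive(Debug)]
struct StrippedCfgItem<ModId> {
    parent_module: ModId,
    ident: String,
    cfg: String,
}

impl<ModId> StrippedCfgItem<ModId> {
    // Convert the module id while moving the other fields through unchanged.
    fn map_mod_id<New>(self, f: impl FnOnce(ModId) -> New) -> StrippedCfgItem<New> {
        StrippedCfgItem { parent_module: f(self.parent_module), ident: self.ident, cfg: self.cfg }
    }
}

fn main() {
    let item = StrippedCfgItem { parent_module: 7_u32, ident: "foo".into(), cfg: "cfg(test)".into() };
    let mapped = item.map_mod_id(|id| format!("mod-{id}"));
    println!("{mapped:?}");
}
```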
@@ -47,7 +47,7 @@ use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
use rustc_middle::span_bug;
use rustc_middle::ty::{Asyncness, ResolverAstLowering};
use rustc_span::{Ident, Span};
use rustc_span::{Ident, Span, Symbol};
use {rustc_ast as ast, rustc_hir as hir};

use super::{GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode};

@@ -234,12 +234,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::FnSig { decl, header, span }
}

fn generate_param(&mut self, span: Span) -> (hir::Param<'hir>, NodeId) {
fn generate_param(&mut self, idx: usize, span: Span) -> (hir::Param<'hir>, NodeId) {
let pat_node_id = self.next_node_id();
let pat_id = self.lower_node_id(pat_node_id);
let ident = Ident::with_dummy_span(Symbol::intern(&format!("arg{idx}")));
let pat = self.arena.alloc(hir::Pat {
hir_id: pat_id,
kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, Ident::empty(), None),
kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None),
span,
default_binding_modes: false,
});

@@ -247,9 +248,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
(hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
}

fn generate_arg(&mut self, param_id: HirId, span: Span) -> hir::Expr<'hir> {
fn generate_arg(&mut self, idx: usize, param_id: HirId, span: Span) -> hir::Expr<'hir> {
let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
ident: Ident::empty(),
ident: Ident::with_dummy_span(Symbol::intern(&format!("arg{idx}"))),
hir_id: self.next_id(),
res: Res::Local(param_id),
args: None,

@@ -273,7 +274,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);

for idx in 0..param_count {
let (param, pat_node_id) = this.generate_param(span);
let (param, pat_node_id) = this.generate_param(idx, span);
parameters.push(param);

let arg = if let Some(block) = block

@@ -289,7 +290,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
this.lower_target_expr(&block)
} else {
this.generate_arg(param.pat.hir_id, span)
this.generate_arg(idx, param.pat.hir_id, span)
};
args.push(arg);
}

@@ -645,7 +645,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
(
// Disallow `impl Trait` in foreign items.
this.lower_fn_decl(fdec, i.id, sig.span, FnDeclKind::ExternFn, None),
this.lower_fn_params_to_names(fdec),
this.lower_fn_params_to_idents(fdec),
)
});

@@ -833,7 +833,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
}) => {
// FIXME(contracts): Deny contract here since it won't apply to
// any impl method or callees.
let names = self.lower_fn_params_to_names(&sig.decl);
let idents = self.lower_fn_params_to_idents(&sig.decl);
let (generics, sig) = self.lower_method_sig(
generics,
sig,

@@ -851,7 +851,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
(
*ident,
generics,
hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)),
hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(idents)),
false,
)
}

@@ -1247,7 +1247,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
safety: self.lower_safety(f.safety, hir::Safety::Safe),
abi: self.lower_extern(f.ext),
decl: self.lower_fn_decl(&f.decl, t.id, t.span, FnDeclKind::Pointer, None),
param_names: self.lower_fn_params_to_names(&f.decl),
param_idents: self.lower_fn_params_to_idents(&f.decl),
}))
}
TyKind::UnsafeBinder(f) => {

@@ -1494,7 +1494,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}))
}

fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> &'hir [Option<Ident>] {
fn lower_fn_params_to_idents(&mut self, decl: &FnDecl) -> &'hir [Option<Ident>] {
self.arena.alloc_from_iter(decl.inputs.iter().map(|param| match param.pat.kind {
PatKind::Missing => None,
PatKind::Ident(_, ident, _) => Some(self.lower_ident(ident)),

@@ -2034,7 +2034,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}

fn lower_array_length_to_const_arg(&mut self, c: &AnonConst) -> &'hir hir::ConstArg<'hir> {
match c.value.kind {
// We cannot just match on `ExprKind::Underscore` as `(_)` is represented as
// `ExprKind::Paren(ExprKind::Underscore)` and should also be lowered to `GenericArg::Infer`
match c.value.peel_parens().kind {
ExprKind::Underscore => {
if !self.tcx.features().generic_arg_infer() {
feature_err(
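The last hunk above switches `lower_array_length_to_const_arg` to match on `c.value.peel_parens().kind`, so an array length written as `(_)` lowers to an inferred argument just like `_`. A tiny standalone model of that paren-peeling idea is sketched below; the `Expr` type is a toy stand-in, not rustc's AST.

```rust
// Minimal model of the change above: `(_)` parses as Paren(Underscore),
// so strip Paren wrappers before checking for Underscore.
#[derive(Debug)]
enum Expr {
    Underscore,
    Paren(Box<Expr>),
    Lit(u64),
}

impl Expr {
    fn peel_parens(&self) -> &Expr {
        let mut cur = self;
        while let Expr::Paren(inner) = cur {
            cur = inner;
        }
        cur
    }
}

fn is_inferred_length(len: &Expr) -> bool {
    matches!(len.peel_parens(), Expr::Underscore)
}

fn main() {
    let plain = Expr::Underscore;
    let wrapped = Expr::Paren(Box::new(Expr::Paren(Box::new(Expr::Underscore))));
    let concrete = Expr::Lit(4);
    assert!(is_inferred_length(&plain));
    assert!(is_inferred_length(&wrapped)); // `(_)` is now also treated as inferred
    assert!(!is_inferred_length(&concrete));
    println!("ok");
}
```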
@@ -7,12 +7,12 @@ use super::*;
fn fun_to_string(
decl: &ast::FnDecl,
header: ast::FnHeader,
name: Ident,
ident: Ident,
generics: &ast::Generics,
) -> String {
to_string(|s| {
s.head("");
s.print_fn(decl, header, Some(name), generics);
s.print_fn(decl, header, Some(ident), generics);
s.end(); // Close the head box.
s.end(); // Close the outer box.
})
@@ -2500,11 +2500,11 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
);
let ty::Tuple(params) = tupled_params.kind() else { return };

// Find the first argument with a matching type, get its name
let Some(this_name) = params.iter().zip(tcx.hir_body_param_names(closure.body)).find_map(
|(param_ty, name)| {
// Find the first argument with a matching type and get its identifier.
let Some(this_name) = params.iter().zip(tcx.hir_body_param_idents(closure.body)).find_map(
|(param_ty, ident)| {
// FIXME: also support deref for stuff like `Rc` arguments
if param_ty.peel_refs() == local_ty { name } else { None }
if param_ty.peel_refs() == local_ty { ident } else { None }
},
) else {
return;

@@ -3774,7 +3774,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
method_args,
*fn_span,
call_source.from_hir_call(),
self.infcx.tcx.fn_arg_names(method_did)[0],
self.infcx.tcx.fn_arg_idents(method_did)[0],
)
{
err.note(format!("borrow occurs due to deref coercion to `{deref_target_ty}`"));

@@ -1026,7 +1026,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
method_args,
*fn_span,
call_source.from_hir_call(),
self.infcx.tcx.fn_arg_names(method_did)[0],
self.infcx.tcx.fn_arg_idents(method_did)[0],
);

return FnSelfUse {
@@ -234,7 +234,7 @@ mod llvm_enzyme {
let meta_item_vec: ThinVec<MetaItemInner> = match meta_item.kind {
ast::MetaItemKind::List(ref vec) => vec.clone(),
_ => {
dcx.emit_err(errors::AutoDiffInvalidApplication { span: item.span() });
dcx.emit_err(errors::AutoDiffMissingConfig { span: item.span() });
return vec![item];
}
};
@@ -20,14 +20,14 @@ use crate::errors;
struct ProcMacroDerive {
id: NodeId,
trait_name: Symbol,
function_name: Ident,
function_ident: Ident,
span: Span,
attrs: Vec<Symbol>,
}

struct ProcMacroDef {
id: NodeId,
function_name: Ident,
function_ident: Ident,
span: Span,
}

@@ -95,7 +95,7 @@ impl<'a> CollectProcMacros<'a> {
fn collect_custom_derive(
&mut self,
item: &'a ast::Item,
function_name: Ident,
function_ident: Ident,
attr: &'a ast::Attribute,
) {
let Some((trait_name, proc_attrs)) =

@@ -109,7 +109,7 @@ impl<'a> CollectProcMacros<'a> {
id: item.id,
span: item.span,
trait_name,
function_name,
function_ident,
attrs: proc_attrs,
}));
} else {

@@ -123,12 +123,12 @@ impl<'a> CollectProcMacros<'a> {
}
}

fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, function_name: Ident) {
fn collect_attr_proc_macro(&mut self, item: &'a ast::Item, function_ident: Ident) {
if self.in_root && item.vis.kind.is_pub() {
self.macros.push(ProcMacro::Attr(ProcMacroDef {
id: item.id,
span: item.span,
function_name,
function_ident,
}));
} else {
let msg = if !self.in_root {

@@ -141,12 +141,12 @@ impl<'a> CollectProcMacros<'a> {
}
}

fn collect_bang_proc_macro(&mut self, item: &'a ast::Item, function_name: Ident) {
fn collect_bang_proc_macro(&mut self, item: &'a ast::Item, function_ident: Ident) {
if self.in_root && item.vis.kind.is_pub() {
self.macros.push(ProcMacro::Bang(ProcMacroDef {
id: item.id,
span: item.span,
function_name,
function_ident,
}));
} else {
let msg = if !self.in_root {

@@ -303,7 +303,7 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
ProcMacro::Derive(m) => m.span,
ProcMacro::Attr(m) | ProcMacro::Bang(m) => m.span,
};
let local_path = |cx: &ExtCtxt<'_>, name| cx.expr_path(cx.path(span, vec![name]));
let local_path = |cx: &ExtCtxt<'_>, ident| cx.expr_path(cx.path(span, vec![ident]));
let proc_macro_ty_method_path = |cx: &ExtCtxt<'_>, method| {
cx.expr_path(cx.path(
span.with_ctxt(harness_span.ctxt()),

@@ -327,7 +327,7 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
.map(|&s| cx.expr_str(span, s))
.collect::<ThinVec<_>>(),
),
local_path(cx, cd.function_name),
local_path(cx, cd.function_ident),
],
)
}

@@ -345,8 +345,8 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
harness_span,
proc_macro_ty_method_path(cx, ident),
thin_vec![
cx.expr_str(span, ca.function_name.name),
local_path(cx, ca.function_name),
cx.expr_str(span, ca.function_ident.name),
local_path(cx, ca.function_ident),
],
)
}
@@ -169,8 +169,11 @@ fn produce_final_output_artifacts(
if codegen_results.modules.len() == 1 {
// 1) Only one codegen unit. In this case it's no difficulty
// to copy `foo.0.x` to `foo.x`.
let module_name = Some(&codegen_results.modules[0].name[..]);
let path = crate_output.temp_path(output_type, module_name);
let path = crate_output.temp_path_for_cgu(
output_type,
&codegen_results.modules[0].name,
sess.invocation_temp.as_deref(),
);
let output = crate_output.path(output_type);
if !output_type.is_text_output() && output.is_tty() {
sess.dcx()

@@ -183,22 +186,16 @@ fn produce_final_output_artifacts(
ensure_removed(sess.dcx(), &path);
}
} else {
let extension = crate_output
.temp_path(output_type, None)
.extension()
.unwrap()
.to_str()
.unwrap()
.to_owned();

if crate_output.outputs.contains_explicit_name(&output_type) {
// 2) Multiple codegen units, with `--emit foo=some_name`. We have
// no good solution for this case, so warn the user.
sess.dcx().emit_warn(ssa_errors::IgnoringEmitPath { extension });
sess.dcx()
.emit_warn(ssa_errors::IgnoringEmitPath { extension: output_type.extension() });
} else if crate_output.single_output_file.is_some() {
// 3) Multiple codegen units, with `-o some_name`. We have
// no good solution for this case, so warn the user.
sess.dcx().emit_warn(ssa_errors::IgnoringOutput { extension });
sess.dcx()
.emit_warn(ssa_errors::IgnoringOutput { extension: output_type.extension() });
} else {
// 4) Multiple codegen units, but no explicit name. We
// just leave the `foo.0.x` files in place.
@@ -351,6 +348,7 @@ fn make_module(sess: &Session, name: String) -> UnwindModule<ObjectModule> {

fn emit_cgu(
output_filenames: &OutputFilenames,
invocation_temp: Option<&str>,
prof: &SelfProfilerRef,
name: String,
module: UnwindModule<ObjectModule>,

@@ -366,6 +364,7 @@ fn emit_cgu(

let module_regular = emit_module(
output_filenames,
invocation_temp,
prof,
product.object,
ModuleKind::Regular,

@@ -391,6 +390,7 @@ fn emit_cgu(

fn emit_module(
output_filenames: &OutputFilenames,
invocation_temp: Option<&str>,
prof: &SelfProfilerRef,
mut object: cranelift_object::object::write::Object<'_>,
kind: ModuleKind,

@@ -409,7 +409,7 @@ fn emit_module(
object.set_section_data(comment_section, producer, 1);
}

let tmp_file = output_filenames.temp_path(OutputType::Object, Some(&name));
let tmp_file = output_filenames.temp_path_for_cgu(OutputType::Object, &name, invocation_temp);
let file = match File::create(&tmp_file) {
Ok(file) => file,
Err(err) => return Err(format!("error creating object file: {}", err)),

@@ -449,8 +449,11 @@ fn reuse_workproduct_for_cgu(
cgu: &CodegenUnit<'_>,
) -> Result<ModuleCodegenResult, String> {
let work_product = cgu.previous_work_product(tcx);
let obj_out_regular =
tcx.output_filenames(()).temp_path(OutputType::Object, Some(cgu.name().as_str()));
let obj_out_regular = tcx.output_filenames(()).temp_path_for_cgu(
OutputType::Object,
cgu.name().as_str(),
tcx.sess.invocation_temp.as_deref(),
);
let source_file_regular = rustc_incremental::in_incr_comp_dir_sess(
&tcx.sess,
&work_product.saved_files.get("o").expect("no saved object file in work product"),

@@ -595,13 +598,19 @@ fn module_codegen(

let global_asm_object_file =
profiler.generic_activity_with_arg("compile assembly", &*cgu_name).run(|| {
crate::global_asm::compile_global_asm(&global_asm_config, &cgu_name, &cx.global_asm)
crate::global_asm::compile_global_asm(
&global_asm_config,
&cgu_name,
&cx.global_asm,
cx.invocation_temp.as_deref(),
)
})?;

let codegen_result =
profiler.generic_activity_with_arg("write object file", &*cgu_name).run(|| {
emit_cgu(
&global_asm_config.output_filenames,
cx.invocation_temp.as_deref(),
&profiler,
cgu_name,
module,

@@ -626,8 +635,11 @@ fn emit_metadata_module(tcx: TyCtxt<'_>, metadata: &EncodedMetadata) -> Compiled
.as_str()
.to_string();

let tmp_file =
tcx.output_filenames(()).temp_path(OutputType::Metadata, Some(&metadata_cgu_name));
let tmp_file = tcx.output_filenames(()).temp_path_for_cgu(
OutputType::Metadata,
&metadata_cgu_name,
tcx.sess.invocation_temp.as_deref(),
);

let symbol_name = rustc_middle::middle::exported_symbols::metadata_symbol_name(tcx);
let obj = create_compressed_metadata_file(tcx.sess, metadata, &symbol_name);

@@ -657,6 +669,7 @@ fn emit_allocator_module(tcx: TyCtxt<'_>) -> Option<CompiledModule> {

match emit_module(
tcx.output_filenames(()),
tcx.sess.invocation_temp.as_deref(),
&tcx.sess.prof,
product.object,
ModuleKind::Allocator,
@@ -728,26 +741,27 @@ pub(crate) fn run_aot(

let concurrency_limiter = IntoDynSyncSend(ConcurrencyLimiter::new(todo_cgus.len()));

let modules = tcx.sess.time("codegen mono items", || {
let mut modules: Vec<_> = par_map(todo_cgus, |(_, cgu)| {
let dep_node = cgu.codegen_dep_node(tcx);
tcx.dep_graph
.with_task(
let modules: Vec<_> =
tcx.sess.time("codegen mono items", || {
let modules: Vec<_> = par_map(todo_cgus, |(_, cgu)| {
let dep_node = cgu.codegen_dep_node(tcx);
let (module, _) = tcx.dep_graph.with_task(
dep_node,
tcx,
(global_asm_config.clone(), cgu.name(), concurrency_limiter.acquire(tcx.dcx())),
module_codegen,
Some(rustc_middle::dep_graph::hash_result),
)
.0
});
modules.extend(
done_cgus
);
IntoDynSyncSend(module)
});
modules
.into_iter()
.map(|(_, cgu)| OngoingModuleCodegen::Sync(reuse_workproduct_for_cgu(tcx, cgu))),
);
modules
});
.map(|module| module.0)
.chain(done_cgus.into_iter().map(|(_, cgu)| {
OngoingModuleCodegen::Sync(reuse_workproduct_for_cgu(tcx, cgu))
}))
.collect()
});

let allocator_module = emit_allocator_module(tcx);
@@ -132,6 +132,7 @@ pub(crate) fn compile_global_asm(
config: &GlobalAsmConfig,
cgu_name: &str,
global_asm: &str,
invocation_temp: Option<&str>,
) -> Result<Option<PathBuf>, String> {
if global_asm.is_empty() {
return Ok(None);

@@ -146,7 +147,7 @@ pub(crate) fn compile_global_asm(
global_asm.push('\n');

let global_asm_object_file = add_file_stem_postfix(
config.output_filenames.temp_path(OutputType::Object, Some(cgu_name)),
config.output_filenames.temp_path_for_cgu(OutputType::Object, cgu_name, invocation_temp),
".asm",
);
@@ -124,6 +124,7 @@ impl<F: Fn() -> String> Drop for PrintOnPanic<F> {
/// inside a single codegen unit with the exception of the Cranelift [`Module`](cranelift_module::Module).
struct CodegenCx {
output_filenames: Arc<OutputFilenames>,
invocation_temp: Option<String>,
should_write_ir: bool,
global_asm: String,
inline_asm_index: usize,

@@ -142,6 +143,7 @@ impl CodegenCx {
};
CodegenCx {
output_filenames: tcx.output_filenames(()).clone(),
invocation_temp: tcx.sess.invocation_temp.clone(),
should_write_ir: crate::pretty_clif::should_write_ir(tcx),
global_asm: String::new(),
inline_asm_index: 0,
@@ -104,7 +104,7 @@ pub(crate) fn maybe_create_entry_wrapper(
let termination_trait = tcx.require_lang_item(LangItem::Termination, None);
let report = tcx
.associated_items(termination_trait)
.find_by_name_and_kind(
.find_by_ident_and_kind(
tcx,
Ident::from_str("report"),
AssocKind::Fn,
@@ -24,19 +24,23 @@ pub(crate) unsafe fn codegen(
{
let context = &module.module_llvm.context;

let module_name = module.name.clone();

let should_combine_object_files = module.module_llvm.should_combine_object_files;

let module_name = Some(&module_name[..]);

// NOTE: Only generate object files with GIMPLE when this environment variable is set for
// now because this requires a particular setup (same gcc/lto1/lto-wrapper commit as libgccjit).
// TODO(antoyo): remove this environment variable.
let fat_lto = env::var("EMBED_LTO_BITCODE").as_deref() == Ok("1");

let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);
let bc_out = cgcx.output_filenames.temp_path_for_cgu(
OutputType::Bitcode,
&module.name,
cgcx.invocation_temp.as_deref(),
);
let obj_out = cgcx.output_filenames.temp_path_for_cgu(
OutputType::Object,
&module.name,
cgcx.invocation_temp.as_deref(),
);

if config.bitcode_needed() {
if fat_lto {

@@ -117,14 +121,22 @@ pub(crate) unsafe fn codegen(
}

if config.emit_ir {
let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
let out = cgcx.output_filenames.temp_path_for_cgu(
OutputType::LlvmAssembly,
&module.name,
cgcx.invocation_temp.as_deref(),
);
std::fs::write(out, "").expect("write file");
}

if config.emit_asm {
let _timer =
cgcx.prof.generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name);
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
let path = cgcx.output_filenames.temp_path_for_cgu(
OutputType::Assembly,
&module.name,
cgcx.invocation_temp.as_deref(),
);
context.compile_to_file(OutputKind::Assembler, path.to_str().expect("path to str"));
}

@@ -238,6 +250,7 @@ pub(crate) unsafe fn codegen(
config.emit_asm,
config.emit_ir,
&cgcx.output_filenames,
cgcx.invocation_temp.as_deref(),
))
}
@@ -119,14 +119,18 @@ pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTar
tcx.output_filenames(()).split_dwarf_path(
tcx.sess.split_debuginfo(),
tcx.sess.opts.unstable_opts.split_dwarf_kind,
Some(mod_name),
mod_name,
tcx.sess.invocation_temp.as_deref(),
)
} else {
None
};

let output_obj_file =
Some(tcx.output_filenames(()).temp_path(OutputType::Object, Some(mod_name)));
let output_obj_file = Some(tcx.output_filenames(()).temp_path_for_cgu(
OutputType::Object,
mod_name,
tcx.sess.invocation_temp.as_deref(),
));
let config = TargetMachineFactoryConfig { split_dwarf_file, output_obj_file };

target_machine_factory(

@@ -330,8 +334,11 @@ pub(crate) fn save_temp_bitcode(
return;
}
let ext = format!("{name}.bc");
let cgu = Some(&module.name[..]);
let path = cgcx.output_filenames.temp_path_ext(&ext, cgu);
let path = cgcx.output_filenames.temp_path_ext_for_cgu(
&ext,
&module.name,
cgcx.invocation_temp.as_deref(),
);
write_bitcode_to_file(module, &path)
}

@@ -694,11 +701,12 @@ pub(crate) unsafe fn optimize(
let llcx = &*module.module_llvm.llcx;
let _handlers = DiagnosticHandlers::new(cgcx, dcx, llcx, module, CodegenDiagnosticsStage::Opt);

let module_name = module.name.clone();
let module_name = Some(&module_name[..]);

if config.emit_no_opt_bc {
let out = cgcx.output_filenames.temp_path_ext("no-opt.bc", module_name);
let out = cgcx.output_filenames.temp_path_ext_for_cgu(
"no-opt.bc",
&module.name,
cgcx.invocation_temp.as_deref(),
);
write_bitcode_to_file(module, &out)
}

@@ -743,8 +751,11 @@ pub(crate) unsafe fn optimize(
if let Some(thin_lto_buffer) = thin_lto_buffer {
let thin_lto_buffer = unsafe { ThinBuffer::from_raw_ptr(thin_lto_buffer) };
module.thin_lto_buffer = Some(thin_lto_buffer.data().to_vec());
let bc_summary_out =
cgcx.output_filenames.temp_path(OutputType::ThinLinkBitcode, module_name);
let bc_summary_out = cgcx.output_filenames.temp_path_for_cgu(
OutputType::ThinLinkBitcode,
&module.name,
cgcx.invocation_temp.as_deref(),
);
if config.emit_thin_lto_summary
&& let Some(thin_link_bitcode_filename) = bc_summary_out.file_name()
{

@@ -801,8 +812,6 @@ pub(crate) unsafe fn codegen(
let llmod = module.module_llvm.llmod();
let llcx = &*module.module_llvm.llcx;
let tm = &*module.module_llvm.tm;
let module_name = module.name.clone();
let module_name = Some(&module_name[..]);
let _handlers =
DiagnosticHandlers::new(cgcx, dcx, llcx, &module, CodegenDiagnosticsStage::Codegen);

@@ -814,8 +823,16 @@ pub(crate) unsafe fn codegen(
// copy it to the .o file, and delete the bitcode if it wasn't
// otherwise requested.

let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);
let bc_out = cgcx.output_filenames.temp_path_for_cgu(
OutputType::Bitcode,
&module.name,
cgcx.invocation_temp.as_deref(),
);
let obj_out = cgcx.output_filenames.temp_path_for_cgu(
OutputType::Object,
&module.name,
cgcx.invocation_temp.as_deref(),
);

if config.bitcode_needed() {
if config.emit_bc || config.emit_obj == EmitObj::Bitcode {

@@ -857,7 +874,11 @@ pub(crate) unsafe fn codegen(
if config.emit_ir {
let _timer =
cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_ir", &*module.name);
let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
let out = cgcx.output_filenames.temp_path_for_cgu(
OutputType::LlvmAssembly,
&module.name,
cgcx.invocation_temp.as_deref(),
);
let out_c = path_to_c_string(&out);

extern "C" fn demangle_callback(

@@ -899,7 +920,11 @@ pub(crate) unsafe fn codegen(
if config.emit_asm {
let _timer =
cgcx.prof.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name);
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
let path = cgcx.output_filenames.temp_path_for_cgu(
OutputType::Assembly,
&module.name,
cgcx.invocation_temp.as_deref(),
);

// We can't use the same module for asm and object code output,
// because that triggers various errors like invalid IR or broken

@@ -929,7 +954,9 @@ pub(crate) unsafe fn codegen(
.prof
.generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name);

let dwo_out = cgcx.output_filenames.temp_path_dwo(module_name);
let dwo_out = cgcx
.output_filenames
.temp_path_dwo_for_cgu(&module.name, cgcx.invocation_temp.as_deref());
let dwo_out = match (cgcx.split_debuginfo, cgcx.split_dwarf_kind) {
// Don't change how DWARF is emitted when disabled.
(SplitDebuginfo::Off, _) => None,

@@ -994,6 +1021,7 @@ pub(crate) unsafe fn codegen(
config.emit_asm,
config.emit_ir,
&cgcx.output_filenames,
cgcx.invocation_temp.as_deref(),
))
}
@@ -4,8 +4,8 @@ use std::borrow::Borrow;

use libc::{c_char, c_uint};
use rustc_abi as abi;
use rustc_abi::HasDataLayout;
use rustc_abi::Primitive::Pointer;
use rustc_abi::{AddressSpace, HasDataLayout};
use rustc_ast::Mutability;
use rustc_codegen_ssa::common::TypeKind;
use rustc_codegen_ssa::traits::*;

@@ -269,7 +269,8 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
}
Scalar::Ptr(ptr, _size) => {
let (prov, offset) = ptr.into_parts();
let (base_addr, base_addr_space) = match self.tcx.global_alloc(prov.alloc_id()) {
let global_alloc = self.tcx.global_alloc(prov.alloc_id());
let base_addr = match global_alloc {
GlobalAlloc::Memory(alloc) => {
// For ZSTs directly codegen an aligned pointer.
// This avoids generating a zero-sized constant value and actually needing a

@@ -301,12 +302,10 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
format!("alloc_{hash:032x}").as_bytes(),
);
}
(value, AddressSpace::DATA)
value
}
}
GlobalAlloc::Function { instance, .. } => {
(self.get_fn_addr(instance), self.data_layout().instruction_address_space)
}
GlobalAlloc::Function { instance, .. } => self.get_fn_addr(instance),
GlobalAlloc::VTable(ty, dyn_ty) => {
let alloc = self
.tcx

@@ -319,14 +318,15 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
.unwrap_memory();
let init = const_alloc_to_llvm(self, alloc, /*static*/ false);
let value = self.static_addr_of_impl(init, alloc.inner().align, None);
(value, AddressSpace::DATA)
value
}
GlobalAlloc::Static(def_id) => {
assert!(self.tcx.is_static(def_id));
assert!(!self.tcx.is_thread_local_static(def_id));
(self.get_static(def_id), AddressSpace::DATA)
self.get_static(def_id)
}
};
let base_addr_space = global_alloc.address_space(self);
let llval = unsafe {
llvm::LLVMConstInBoundsGEP2(
self.type_i8(),
@@ -910,7 +910,8 @@ pub(crate) fn build_compile_unit_di_node<'ll, 'tcx>(
&& let Some(f) = output_filenames.split_dwarf_path(
tcx.sess.split_debuginfo(),
tcx.sess.opts.unstable_opts.split_dwarf_kind,
Some(codegen_unit_name),
codegen_unit_name,
tcx.sess.invocation_temp.as_deref(),
) {
// We get a path relative to the working directory from split_dwarf_path
Some(tcx.sess.source_map().path_mapping().to_real_filename(f))
@@ -1421,7 +1421,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
return Ok(bx.shuffle_vector(args[0].immediate(), args[1].immediate(), indices));
}

if name == sym::simd_insert {
if name == sym::simd_insert || name == sym::simd_insert_dyn {
require!(
in_elem == arg_tys[2],
InvalidMonomorphization::InsertedType {

@@ -1432,40 +1432,49 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
out_ty: arg_tys[2]
}
);
let idx = bx
.const_to_opt_u128(args[1].immediate(), false)
.expect("typeck should have ensure that this is a const");
if idx >= in_len.into() {
return_error!(InvalidMonomorphization::SimdIndexOutOfBounds {
span,
name,
arg_idx: 1,
total_len: in_len.into(),
});
}
return Ok(bx.insert_element(
args[0].immediate(),
args[2].immediate(),
bx.const_i32(idx as i32),
));

let index_imm = if name == sym::simd_insert {
let idx = bx
.const_to_opt_u128(args[1].immediate(), false)
.expect("typeck should have ensure that this is a const");
if idx >= in_len.into() {
return_error!(InvalidMonomorphization::SimdIndexOutOfBounds {
span,
name,
arg_idx: 1,
total_len: in_len.into(),
});
}
bx.const_i32(idx as i32)
} else {
args[1].immediate()
};

return Ok(bx.insert_element(args[0].immediate(), args[2].immediate(), index_imm));
}
if name == sym::simd_extract {
if name == sym::simd_extract || name == sym::simd_extract_dyn {
require!(
ret_ty == in_elem,
InvalidMonomorphization::ReturnType { span, name, in_elem, in_ty, ret_ty }
);
let idx = bx
.const_to_opt_u128(args[1].immediate(), false)
.expect("typeck should have ensure that this is a const");
if idx >= in_len.into() {
return_error!(InvalidMonomorphization::SimdIndexOutOfBounds {
span,
name,
arg_idx: 1,
total_len: in_len.into(),
});
}
return Ok(bx.extract_element(args[0].immediate(), bx.const_i32(idx as i32)));
let index_imm = if name == sym::simd_extract {
let idx = bx
.const_to_opt_u128(args[1].immediate(), false)
.expect("typeck should have ensure that this is a const");
if idx >= in_len.into() {
return_error!(InvalidMonomorphization::SimdIndexOutOfBounds {
span,
name,
arg_idx: 1,
total_len: in_len.into(),
});
}
bx.const_i32(idx as i32)
} else {
args[1].immediate()
};

return Ok(bx.extract_element(args[0].immediate(), index_imm));
}

if name == sym::simd_select {
@@ -112,8 +112,12 @@ pub fn link_binary(
codegen_results.crate_info.local_crate_name,
);
let crate_name = format!("{}", codegen_results.crate_info.local_crate_name);
let out_filename =
output.file_for_writing(outputs, OutputType::Exe, Some(crate_name.as_str()));
let out_filename = output.file_for_writing(
outputs,
OutputType::Exe,
&crate_name,
sess.invocation_temp.as_deref(),
);
match crate_type {
CrateType::Rlib => {
let _timer = sess.timer("link_rlib");
@@ -306,14 +306,18 @@ impl TargetMachineFactoryConfig {
cgcx.output_filenames.split_dwarf_path(
cgcx.split_debuginfo,
cgcx.split_dwarf_kind,
Some(module_name),
module_name,
cgcx.invocation_temp.as_deref(),
)
} else {
None
};

let output_obj_file =
Some(cgcx.output_filenames.temp_path(OutputType::Object, Some(module_name)));
let output_obj_file = Some(cgcx.output_filenames.temp_path_for_cgu(
OutputType::Object,
module_name,
cgcx.invocation_temp.as_deref(),
));
TargetMachineFactoryConfig { split_dwarf_file, output_obj_file }
}
}

@@ -344,6 +348,7 @@ pub struct CodegenContext<B: WriteBackendMethods> {
pub crate_types: Vec<CrateType>,
pub each_linked_rlib_for_lto: Vec<(CrateNum, PathBuf)>,
pub output_filenames: Arc<OutputFilenames>,
pub invocation_temp: Option<String>,
pub regular_module_config: Arc<ModuleConfig>,
pub metadata_module_config: Arc<ModuleConfig>,
pub allocator_module_config: Arc<ModuleConfig>,

@@ -582,8 +587,11 @@ fn produce_final_output_artifacts(
if let [module] = &compiled_modules.modules[..] {
// 1) Only one codegen unit. In this case it's no difficulty
// to copy `foo.0.x` to `foo.x`.
let module_name = Some(&module.name[..]);
let path = crate_output.temp_path(output_type, module_name);
let path = crate_output.temp_path_for_cgu(
output_type,
&module.name,
sess.invocation_temp.as_deref(),
);
let output = crate_output.path(output_type);
if !output_type.is_text_output() && output.is_tty() {
sess.dcx()

@@ -596,22 +604,15 @@ fn produce_final_output_artifacts(
ensure_removed(sess.dcx(), &path);
}
} else {
let extension = crate_output
.temp_path(output_type, None)
.extension()
.unwrap()
.to_str()
.unwrap()
.to_owned();

if crate_output.outputs.contains_explicit_name(&output_type) {
// 2) Multiple codegen units, with `--emit foo=some_name`. We have
// no good solution for this case, so warn the user.
sess.dcx().emit_warn(errors::IgnoringEmitPath { extension });
sess.dcx()
.emit_warn(errors::IgnoringEmitPath { extension: output_type.extension() });
} else if crate_output.single_output_file.is_some() {
// 3) Multiple codegen units, with `-o some_name`. We have
// no good solution for this case, so warn the user.
sess.dcx().emit_warn(errors::IgnoringOutput { extension });
sess.dcx().emit_warn(errors::IgnoringOutput { extension: output_type.extension() });
} else {
// 4) Multiple codegen units, but no explicit name. We
// just leave the `foo.0.x` files in place.

@@ -967,7 +968,12 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
module.source.saved_files.get("dwo").as_ref().and_then(|saved_dwarf_object_file| {
let dwarf_obj_out = cgcx
.output_filenames
.split_dwarf_path(cgcx.split_debuginfo, cgcx.split_dwarf_kind, Some(&module.name))
.split_dwarf_path(
cgcx.split_debuginfo,
cgcx.split_dwarf_kind,
&module.name,
cgcx.invocation_temp.as_deref(),
)
.expect(
"saved dwarf object in work product but `split_dwarf_path` returned `None`",
);

@@ -977,7 +983,11 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
let mut load_from_incr_cache = |perform, output_type: OutputType| {
if perform {
let saved_file = module.source.saved_files.get(output_type.extension())?;
let output_path = cgcx.output_filenames.temp_path(output_type, Some(&module.name));
let output_path = cgcx.output_filenames.temp_path_for_cgu(
output_type,
&module.name,
cgcx.invocation_temp.as_deref(),
);
load_from_incr_comp_dir(output_path, &saved_file)
} else {
None

@@ -1222,6 +1232,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
split_dwarf_kind: tcx.sess.opts.unstable_opts.split_dwarf_kind,
parallel: backend.supports_parallel() && !sess.opts.unstable_opts.no_parallel_backend,
pointer_size: tcx.data_layout.pointer_size,
invocation_temp: sess.invocation_temp.clone(),
};

// This is the "main loop" of parallel work happening for parallel codegen.
@@ -10,7 +10,7 @@ use rustc_ast::expand::allocator::{ALLOCATOR_METHODS, AllocatorKind, global_fn_n
use rustc_attr_parsing::OptimizeAttr;
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
use rustc_data_structures::sync::par_map;
use rustc_data_structures::sync::{IntoDynSyncSend, par_map};
use rustc_data_structures::unord::UnordMap;
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::lang_items::LangItem;

@@ -640,8 +640,11 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
let metadata_cgu_name =
cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("metadata")).to_string();
tcx.sess.time("write_compressed_metadata", || {
let file_name =
tcx.output_filenames(()).temp_path(OutputType::Metadata, Some(&metadata_cgu_name));
let file_name = tcx.output_filenames(()).temp_path_for_cgu(
OutputType::Metadata,
&metadata_cgu_name,
tcx.sess.invocation_temp.as_deref(),
);
let data = create_compressed_metadata_file(
tcx.sess,
&metadata,

@@ -757,7 +760,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(

let pre_compiled_cgus = par_map(cgus, |(i, _)| {
let module = backend.compile_codegen_unit(tcx, codegen_units[i].name());
(i, module)
(i, IntoDynSyncSend(module))
});

total_codegen_time += start_time.elapsed();

@@ -777,7 +780,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
match cgu_reuse {
CguReuse::No => {
let (module, cost) = if let Some(cgu) = pre_compiled_cgus.remove(&i) {
cgu
cgu.0
} else {
let start_time = Instant::now();
let module = backend.compile_codegen_unit(tcx, cgu.name());
@@ -277,13 +277,13 @@ pub struct BinaryOutputToTty {
#[derive(Diagnostic)]
#[diag(codegen_ssa_ignoring_emit_path)]
pub struct IgnoringEmitPath {
pub extension: String,
pub extension: &'static str,
}

#[derive(Diagnostic)]
#[diag(codegen_ssa_ignoring_output)]
pub struct IgnoringOutput {
pub extension: String,
pub extension: &'static str,
}

#[derive(Diagnostic)]
@@ -105,13 +105,19 @@ impl<M> ModuleCodegen<M> {
emit_asm: bool,
emit_ir: bool,
outputs: &OutputFilenames,
invocation_temp: Option<&str>,
) -> CompiledModule {
let object = emit_obj.then(|| outputs.temp_path(OutputType::Object, Some(&self.name)));
let dwarf_object = emit_dwarf_obj.then(|| outputs.temp_path_dwo(Some(&self.name)));
let bytecode = emit_bc.then(|| outputs.temp_path(OutputType::Bitcode, Some(&self.name)));
let assembly = emit_asm.then(|| outputs.temp_path(OutputType::Assembly, Some(&self.name)));
let llvm_ir =
emit_ir.then(|| outputs.temp_path(OutputType::LlvmAssembly, Some(&self.name)));
let object = emit_obj
.then(|| outputs.temp_path_for_cgu(OutputType::Object, &self.name, invocation_temp));
let dwarf_object =
emit_dwarf_obj.then(|| outputs.temp_path_dwo_for_cgu(&self.name, invocation_temp));
let bytecode = emit_bc
.then(|| outputs.temp_path_for_cgu(OutputType::Bitcode, &self.name, invocation_temp));
let assembly = emit_asm
.then(|| outputs.temp_path_for_cgu(OutputType::Assembly, &self.name, invocation_temp));
let llvm_ir = emit_ir.then(|| {
outputs.temp_path_for_cgu(OutputType::LlvmAssembly, &self.name, invocation_temp)
});

CompiledModule {
name: self.name.clone(),
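Across the backend files above, the recurring change is the move from `temp_path(output_type, Some(name))` to `temp_path_for_cgu(output_type, name, invocation_temp)`, threading an optional invocation-specific component into per-CGU temp file names. The sketch below is only a rough, self-contained approximation of what such a path builder could look like; the real logic lives in rustc_session's `OutputFilenames`, and the exact file-name layout and the purpose of `invocation_temp` (avoiding collisions between concurrent invocations) are assumptions here.

```rust
// Hypothetical stand-in for a per-CGU temp path builder; not rustc's implementation.
use std::path::PathBuf;

fn temp_path_for_cgu_sketch(
    out_dir: &str,
    crate_name: &str,
    extension: &str,
    cgu_name: &str,
    invocation_temp: Option<&str>, // optional invocation-specific tag (assumption)
) -> PathBuf {
    let mut file = format!("{crate_name}.{cgu_name}");
    if let Some(tag) = invocation_temp {
        // Keep temp files from different compiler invocations from colliding.
        file.push('.');
        file.push_str(tag);
    }
    file.push('.');
    file.push_str(extension);
    PathBuf::from(out_dir).join(file)
}

fn main() {
    let p = temp_path_for_cgu_sketch("target/debug", "foo", "o", "cgu0", Some("s-abc123"));
    println!("{}", p.display()); // e.g. target/debug/foo.cgu0.s-abc123.o
}
```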
@@ -14,7 +14,7 @@ indexmap = "2.4.0"
jobserver_crate = { version = "0.1.28", package = "jobserver" }
measureme = "12.0.1"
rustc-hash = "2.0.0"
rustc-rayon = { version = "0.5.1", features = ["indexmap"] }
rustc-rayon-core = { version = "0.5.0" }
rustc-stable-hash = { version = "0.1.0", features = ["nightly"] }
rustc_arena = { path = "../rustc_arena" }
rustc_graphviz = { path = "../rustc_graphviz" }
@@ -179,6 +179,12 @@ impl<T> FromDyn<T> {
FromDyn(val)
}

#[inline(always)]
pub fn derive<O>(&self, val: O) -> FromDyn<O> {
// We already did the check for `sync::is_dyn_thread_safe()` when creating `Self`
FromDyn(val)
}

#[inline(always)]
pub fn into_inner(self) -> T {
self.0

@@ -200,6 +206,13 @@ impl<T> std::ops::Deref for FromDyn<T> {
}
}

impl<T> std::ops::DerefMut for FromDyn<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}

// A wrapper to convert a struct that is already a `Send` or `Sync` into
// an instance of `DynSend` and `DynSync`, since the compiler cannot infer
// it automatically in some cases. (e.g. Box<dyn Send / Sync>)
@@ -88,7 +88,7 @@ impl<T> FreezeLock<T> {
#[inline]
#[track_caller]
pub fn write(&self) -> FreezeWriteGuard<'_, T> {
self.try_write().expect("still mutable")
self.try_write().expect("data should not be frozen if we're still attempting to mutate it")
}

#[inline]
@ -7,7 +7,6 @@ use std::any::Any;
|
|||
use std::panic::{AssertUnwindSafe, catch_unwind, resume_unwind};
|
||||
|
||||
use parking_lot::Mutex;
|
||||
use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelIterator};
|
||||
|
||||
use crate::FatalErrorMarker;
|
||||
use crate::sync::{DynSend, DynSync, FromDyn, IntoDynSyncSend, mode};
|
||||
|
@ -97,11 +96,11 @@ macro_rules! parallel {
|
|||
// This function only works when `mode::is_dyn_thread_safe()`.
|
||||
pub fn scope<'scope, OP, R>(op: OP) -> R
|
||||
where
|
||||
OP: FnOnce(&rayon::Scope<'scope>) -> R + DynSend,
|
||||
OP: FnOnce(&rayon_core::Scope<'scope>) -> R + DynSend,
|
||||
R: DynSend,
|
||||
{
|
||||
let op = FromDyn::from(op);
|
||||
rayon::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner()
|
||||
rayon_core::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
@ -114,7 +113,7 @@ where
|
|||
let oper_a = FromDyn::from(oper_a);
|
||||
let oper_b = FromDyn::from(oper_b);
|
||||
let (a, b) = parallel_guard(|guard| {
|
||||
rayon::join(
|
||||
rayon_core::join(
|
||||
move || guard.run(move || FromDyn::from(oper_a.into_inner()())),
|
||||
move || guard.run(move || FromDyn::from(oper_b.into_inner()())),
|
||||
)
|
||||
|
@ -125,56 +124,103 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
pub fn par_for_each_in<I, T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>>(
|
||||
fn par_slice<I: DynSend>(
|
||||
items: &mut [I],
|
||||
guard: &ParallelGuard,
|
||||
for_each: impl Fn(&mut I) + DynSync + DynSend,
|
||||
) {
|
||||
struct State<'a, F> {
|
||||
for_each: FromDyn<F>,
|
||||
guard: &'a ParallelGuard,
|
||||
group: usize,
|
||||
}
|
||||
|
||||
fn par_rec<I: DynSend, F: Fn(&mut I) + DynSync + DynSend>(
|
||||
items: &mut [I],
|
||||
state: &State<'_, F>,
|
||||
) {
|
||||
if items.len() <= state.group {
|
||||
for item in items {
|
||||
state.guard.run(|| (state.for_each)(item));
|
||||
}
|
||||
} else {
|
||||
let (left, right) = items.split_at_mut(items.len() / 2);
|
||||
let mut left = state.for_each.derive(left);
|
||||
let mut right = state.for_each.derive(right);
|
||||
rayon_core::join(move || par_rec(*left, state), move || par_rec(*right, state));
|
||||
}
|
||||
}
|
||||
|
||||
let state = State {
|
||||
for_each: FromDyn::from(for_each),
|
||||
guard,
|
||||
group: std::cmp::max(items.len() / 128, 1),
|
||||
};
|
||||
par_rec(items, &state)
|
||||
}
|
||||
|
||||
pub fn par_for_each_in<I: DynSend, T: IntoIterator<Item = I>>(
|
||||
t: T,
|
||||
for_each: impl Fn(I) + DynSync + DynSend,
|
||||
for_each: impl Fn(&I) + DynSync + DynSend,
|
||||
) {
|
||||
parallel_guard(|guard| {
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let for_each = FromDyn::from(for_each);
|
||||
t.into_par_iter().for_each(|i| {
|
||||
guard.run(|| for_each(i));
|
||||
});
|
||||
let mut items: Vec<_> = t.into_iter().collect();
|
||||
par_slice(&mut items, guard, |i| for_each(&*i))
|
||||
} else {
|
||||
t.into_iter().for_each(|i| {
|
||||
guard.run(|| for_each(i));
|
||||
guard.run(|| for_each(&i));
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn try_par_for_each_in<
|
||||
T: IntoIterator + IntoParallelIterator<Item = <T as IntoIterator>::Item>,
|
||||
E: Send,
|
||||
>(
|
||||
/// This runs `for_each` in parallel for each iterator item. If one or more of the
|
||||
/// `for_each` calls returns `Err`, the function will also return `Err`. The error returned
|
||||
/// will be non-deterministic, but this is expected to be used with `ErrorGuaranteed` which
|
||||
/// are all equivalent.
|
||||
pub fn try_par_for_each_in<T: IntoIterator, E: DynSend>(
|
||||
t: T,
|
||||
for_each: impl Fn(<T as IntoIterator>::Item) -> Result<(), E> + DynSync + DynSend,
|
||||
) -> Result<(), E> {
|
||||
for_each: impl Fn(&<T as IntoIterator>::Item) -> Result<(), E> + DynSync + DynSend,
|
||||
) -> Result<(), E>
|
||||
where
|
||||
<T as IntoIterator>::Item: DynSend,
|
||||
{
|
||||
parallel_guard(|guard| {
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let for_each = FromDyn::from(for_each);
|
||||
t.into_par_iter()
|
||||
.filter_map(|i| guard.run(|| for_each(i)))
|
||||
.reduce(|| Ok(()), Result::and)
|
||||
let mut items: Vec<_> = t.into_iter().collect();
|
||||
|
||||
let error = Mutex::new(None);
|
||||
|
||||
par_slice(&mut items, guard, |i| {
|
||||
if let Err(err) = for_each(&*i) {
|
||||
*error.lock() = Some(err);
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(err) = error.into_inner() { Err(err) } else { Ok(()) }
|
||||
} else {
|
||||
t.into_iter().filter_map(|i| guard.run(|| for_each(i))).fold(Ok(()), Result::and)
|
||||
t.into_iter().filter_map(|i| guard.run(|| for_each(&i))).fold(Ok(()), Result::and)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn par_map<
|
||||
I,
|
||||
T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>,
|
||||
R: std::marker::Send,
|
||||
C: FromIterator<R> + FromParallelIterator<R>,
|
||||
>(
|
||||
pub fn par_map<I: DynSend, T: IntoIterator<Item = I>, R: DynSend, C: FromIterator<R>>(
|
||||
t: T,
|
||||
map: impl Fn(I) -> R + DynSync + DynSend,
|
||||
) -> C {
|
||||
parallel_guard(|guard| {
|
||||
if mode::is_dyn_thread_safe() {
|
||||
let map = FromDyn::from(map);
|
||||
t.into_par_iter().filter_map(|i| guard.run(|| map(i))).collect()
|
||||
|
||||
let mut items: Vec<(Option<I>, Option<R>)> =
|
||||
t.into_iter().map(|i| (Some(i), None)).collect();
|
||||
|
||||
par_slice(&mut items, guard, |i| {
|
||||
i.1 = Some(map(i.0.take().unwrap()));
|
||||
});
|
||||
|
||||
items.into_iter().filter_map(|i| i.1).collect()
|
||||
} else {
|
||||
t.into_iter().filter_map(|i| guard.run(|| map(i))).collect()
|
||||
}
|
||||
|
|
|
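The new `par_slice` helper above replaces the rayon parallel-iterator calls with a recursive divide-and-conquer over a mutable slice: split in half, hand both halves to `rayon_core::join`, and fall back to a sequential loop once a chunk is at most `items.len() / 128` elements. Below is a self-contained sketch of that same shape, with `std::thread::scope` standing in for `rayon_core::join` so it runs without rustc's internal crates.

```rust
// Standalone sketch of the recursive split-and-join strategy used by `par_slice`.
// std::thread::scope replaces rayon_core::join purely to keep the example runnable.
fn par_slice_sketch<T: Send>(items: &mut [T], group: usize, for_each: &(impl Fn(&mut T) + Sync)) {
    if items.len() <= group {
        // Small enough: process sequentially.
        for item in items {
            for_each(item);
        }
    } else {
        // Split in half, process both halves in parallel, then join.
        let mid = items.len() / 2;
        let (left, right) = items.split_at_mut(mid);
        std::thread::scope(|s| {
            s.spawn(|| par_slice_sketch(left, group, for_each));
            par_slice_sketch(right, group, for_each);
        });
    }
}

fn main() {
    let mut data: Vec<u64> = (0..1_000).collect();
    let group = std::cmp::max(data.len() / 128, 1); // same chunking rule as the diff
    par_slice_sketch(&mut data, group, &|x| *x *= 2);
    assert_eq!(data[10], 20);
    println!("done");
}
```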
@@ -109,6 +109,16 @@ impl<T, I: Iterator<Item = T>> UnordItems<T, I> {
pub fn collect<C: From<UnordItems<T, I>>>(self) -> C {
self.into()
}

/// If the iterator has only one element, returns it, otherwise returns `None`.
#[track_caller]
pub fn get_only(mut self) -> Option<T> {
let item = self.0.next();
if self.0.next().is_some() {
return None;
}
item
}
}

impl<T> UnordItems<T, std::iter::Empty<T>> {
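The `get_only` method added above yields the iterator's single element and returns `None` when there are zero or more than one. The same logic as a tiny free function:

```rust
// Standalone version of the `get_only` helper: Some(item) only when the
// iterator produces exactly one item.
fn get_only<T>(mut iter: impl Iterator<Item = T>) -> Option<T> {
    let first = iter.next();
    if iter.next().is_some() {
        return None; // more than one element
    }
    first
}

fn main() {
    assert_eq!(get_only([42].into_iter()), Some(42));
    assert_eq!(get_only([1, 2].into_iter()), None);
    assert_eq!(get_only(std::iter::empty::<i32>()), None);
    println!("ok");
}
```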
@@ -264,6 +264,7 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send))
hash_untracked_state: None,
register_lints: None,
override_queries: None,
extra_symbols: Vec::new(),
make_codegen_backend: None,
registry: diagnostics_registry(),
using_internal_features: &USING_INTERNAL_FEATURES,

@@ -348,10 +349,6 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send))
// Make sure name resolution and macro expansion is run.
let _ = tcx.resolver_for_lowering();

if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir {
dump_feature_usage_metrics(tcx, metrics_dir);
}

if callbacks.after_expansion(compiler, tcx) == Compilation::Stop {
return early_exit();
}

@@ -370,6 +367,10 @@ pub fn run_compiler(at_args: &[String], callbacks: &mut (dyn Callbacks + Send))

tcx.ensure_ok().analysis(());

if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir {
dump_feature_usage_metrics(tcx, metrics_dir);
}

if callbacks.after_analysis(compiler, tcx) == Compilation::Stop {
return early_exit();
}
@@ -1,8 +1,10 @@
#### Note: this error code is no longer emitted by the compiler.

An intrinsic was declared without being a function.

Erroneous code example:

```compile_fail,E0622
```no_run
#![feature(intrinsics)]
#![allow(internal_features)]

@@ -397,7 +397,7 @@ E0618: 0618,
E0619: 0619,
E0620: 0620,
E0621: 0621,
E0622: 0622,
E0622: 0622, // REMOVED: rustc-intrinsic ABI was removed
E0623: 0623,
E0624: 0624,
E0625: 0625,
@@ -1102,7 +1102,7 @@ pub trait ResolverExpand {
/// HIR proc macros items back to their harness items.
fn declare_proc_macro(&mut self, id: NodeId);

fn append_stripped_cfg_item(&mut self, parent_node: NodeId, name: Ident, cfg: ast::MetaItem);
fn append_stripped_cfg_item(&mut self, parent_node: NodeId, ident: Ident, cfg: ast::MetaItem);

/// Tools registered with `#![register_tool]` and used by tool attributes and lints.
fn registered_tools(&self) -> &RegisteredTools;

@@ -1169,9 +1169,9 @@ trait InvocationCollectorNode: HasAttrs + HasNodeId + Sized {
collector.cx.dcx().emit_err(RemoveNodeNotSupported { span, descr: Self::descr() });
}

/// All of the names (items) declared by this node.
/// All of the identifiers (items) declared by this node.
/// This is an approximation and should only be used for diagnostics.
fn declared_names(&self) -> Vec<Ident> {
fn declared_idents(&self) -> Vec<Ident> {
vec![]
}
}

@@ -1306,7 +1306,7 @@ impl InvocationCollectorNode for P<ast::Item> {
res
}

fn declared_names(&self) -> Vec<Ident> {
fn declared_idents(&self) -> Vec<Ident> {
if let ItemKind::Use(ut) = &self.kind {
fn collect_use_tree_leaves(ut: &ast::UseTree, idents: &mut Vec<Ident>) {
match &ut.kind {

@@ -2061,10 +2061,10 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
}

if let Some(meta_item) = meta_item {
for name in node.declared_names() {
for ident in node.declared_idents() {
self.cx.resolver.append_stripped_cfg_item(
self.cx.current_expansion.lint_node_id,
name,
ident,
meta_item.clone(),
)
}
@ -267,11 +267,16 @@ impl DefKind {
|
|||
| DefKind::ForeignTy
|
||||
| DefKind::TraitAlias
|
||||
| DefKind::TyParam
|
||||
| DefKind::ExternCrate => DefPathData::TypeNs(Some(name.unwrap())),
|
||||
| DefKind::ExternCrate => DefPathData::TypeNs(name.unwrap()),
|
||||
|
||||
// An associated type names will be missing for an RPITIT. It will
|
||||
// later be given a name with `synthetic` in it, if necessary.
|
||||
DefKind::AssocTy => DefPathData::TypeNs(name),
|
||||
// An associated type name will be missing for an RPITIT.
|
||||
DefKind::AssocTy => {
|
||||
if let Some(name) = name {
|
||||
DefPathData::TypeNs(name)
|
||||
} else {
|
||||
DefPathData::AnonAssocTy
|
||||
}
|
||||
}
|
||||
|
||||
// It's not exactly an anon const, but wrt DefPathData, there
|
||||
// is no difference.
|
||||
|
|
|
@@ -271,9 +271,8 @@ pub enum DefPathData {
Use,
/// A global asm item.
GlobalAsm,
/// Something in the type namespace. Will be empty for RPITIT associated
/// types, which are given a synthetic name later, if necessary.
TypeNs(Option<Symbol>),
/// Something in the type namespace.
TypeNs(Symbol),
/// Something in the value namespace.
ValueNs(Symbol),
/// Something in the macro namespace.

@@ -291,6 +290,8 @@ pub enum DefPathData {
/// An existential `impl Trait` type node.
/// Argument position `impl Trait` have a `TypeNs` with their pretty-printed name.
OpaqueTy,
/// An anonymous associated type from an RPITIT.
AnonAssocTy,
/// A synthetic body for a coroutine's by-move body.
SyntheticCoroutineBody,
}

@@ -413,9 +414,7 @@ impl DefPathData {
pub fn get_opt_name(&self) -> Option<Symbol> {
use self::DefPathData::*;
match *self {
TypeNs(name) => name,
ValueNs(name) | MacroNs(name) | LifetimeNs(name) => Some(name),
TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name) => Some(name),

Impl
| ForeignMod

@@ -426,6 +425,7 @@ impl DefPathData {
| Ctor
| AnonConst
| OpaqueTy
| AnonAssocTy
| SyntheticCoroutineBody => None,
}
}

@@ -433,14 +433,9 @@ impl DefPathData {
pub fn name(&self) -> DefPathDataName {
use self::DefPathData::*;
match *self {
TypeNs(name) => {
if let Some(name) = name {
DefPathDataName::Named(name)
} else {
DefPathDataName::Anon { namespace: sym::synthetic }
}
TypeNs(name) | ValueNs(name) | MacroNs(name) | LifetimeNs(name) => {
DefPathDataName::Named(name)
}
ValueNs(name) | MacroNs(name) | LifetimeNs(name) => DefPathDataName::Named(name),
// Note that this does not show up in user print-outs.
CrateRoot => DefPathDataName::Anon { namespace: kw::Crate },
Impl => DefPathDataName::Anon { namespace: kw::Impl },

@@ -451,6 +446,7 @@ impl DefPathData {
Ctor => DefPathDataName::Anon { namespace: sym::constructor },
AnonConst => DefPathDataName::Anon { namespace: sym::constant },
OpaqueTy => DefPathDataName::Anon { namespace: sym::opaque },
AnonAssocTy => DefPathDataName::Anon { namespace: sym::anon_assoc },
SyntheticCoroutineBody => DefPathDataName::Anon { namespace: sym::synthetic },
}
}
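The `DefPathData` hunks above change `TypeNs` to carry a plain `Symbol` and move the nameless RPITIT case into a new `AnonAssocTy` variant. A minimal, self-contained sketch of the before/after matching shape (the `MiniDefPathData` enum and `name_of` helper are invented for illustration, not rustc API):

```rust
// Illustrative only: mirrors the shape of the change, not rustc's real types.
#[derive(Debug)]
enum MiniDefPathData {
    TypeNs(String), // previously TypeNs(Option<String>)
    AnonAssocTy,    // new variant standing in for a nameless RPITIT assoc type
    ValueNs(String),
}

fn name_of(data: &MiniDefPathData) -> Option<String> {
    match data {
        // No inner Option left to unwrap: TypeNs always carries a name now.
        MiniDefPathData::TypeNs(name) | MiniDefPathData::ValueNs(name) => Some(name.clone()),
        // The anonymous case is its own variant instead of TypeNs(None).
        MiniDefPathData::AnonAssocTy => None,
    }
}

fn main() {
    assert_eq!(name_of(&MiniDefPathData::TypeNs("Iterator".into())), Some("Iterator".into()));
    assert_eq!(name_of(&MiniDefPathData::AnonAssocTy), None);
}
```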
@@ -3399,9 +3399,9 @@ pub struct BareFnTy<'hir> {
pub abi: ExternAbi,
pub generic_params: &'hir [GenericParam<'hir>],
pub decl: &'hir FnDecl<'hir>,
// `Option` because bare fn parameter names are optional. We also end up
// `Option` because bare fn parameter identifiers are optional. We also end up
// with `None` in some error cases, e.g. invalid parameter patterns.
pub param_names: &'hir [Option<Ident>],
pub param_idents: &'hir [Option<Ident>],
}

#[derive(Debug, Clone, Copy, HashStable_Generic)]

@@ -652,10 +652,10 @@ pub fn walk_foreign_item<'v, V: Visitor<'v>>(
try_visit!(visitor.visit_ident(foreign_item.ident));

match foreign_item.kind {
ForeignItemKind::Fn(ref sig, param_names, ref generics) => {
ForeignItemKind::Fn(ref sig, param_idents, ref generics) => {
try_visit!(visitor.visit_generics(generics));
try_visit!(visitor.visit_fn_decl(sig.decl));
for ident in param_names.iter().copied() {
for ident in param_idents.iter().copied() {
visit_opt!(visitor, visit_ident, ident);
}
}

@@ -1169,9 +1169,9 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(
try_visit!(visitor.visit_ty_unambig(ty));
visit_opt!(visitor, visit_nested_body, default);
}
TraitItemKind::Fn(ref sig, TraitFn::Required(param_names)) => {
TraitItemKind::Fn(ref sig, TraitFn::Required(param_idents)) => {
try_visit!(visitor.visit_fn_decl(sig.decl));
for ident in param_names.iter().copied() {
for ident in param_idents.iter().copied() {
visit_opt!(visitor, visit_ident, ident);
}
}
@@ -17,7 +17,7 @@ fn def_path_hash_depends_on_crate_id() {
// the crate by changing the crate disambiguator (e.g. via bumping the
// crate's version number).

create_session_globals_then(Edition::Edition2024, None, || {
create_session_globals_then(Edition::Edition2024, &[], None, || {
let id0 = StableCrateId::new(Symbol::intern("foo"), false, vec!["1".to_string()], "");
let id1 = StableCrateId::new(Symbol::intern("foo"), false, vec!["2".to_string()], "");
@@ -1,5 +1,5 @@
hir_analysis_ambiguous_assoc_item = ambiguous associated {$assoc_kind} `{$assoc_name}` in bounds of `{$qself}`
.label = ambiguous associated {$assoc_kind} `{$assoc_name}`
hir_analysis_ambiguous_assoc_item = ambiguous associated {$assoc_kind} `{$assoc_ident}` in bounds of `{$qself}`
.label = ambiguous associated {$assoc_kind} `{$assoc_ident}`

hir_analysis_ambiguous_lifetime_bound =
ambiguous lifetime bound, explicit lifetime bound required

@@ -12,13 +12,13 @@ hir_analysis_assoc_item_is_private = {$kind} `{$name}` is private
.label = private {$kind}
.defined_here_label = the {$kind} is defined here

hir_analysis_assoc_item_not_found = associated {$assoc_kind} `{$assoc_name}` not found for `{$qself}`
hir_analysis_assoc_item_not_found = associated {$assoc_kind} `{$assoc_ident}` not found for `{$qself}`

hir_analysis_assoc_item_not_found_found_in_other_trait_label = there is {$identically_named ->
[true] an
*[false] a similarly named
} associated {$assoc_kind} `{$suggested_name}` in the trait `{$trait_name}`
hir_analysis_assoc_item_not_found_label = associated {$assoc_kind} `{$assoc_name}` not found
hir_analysis_assoc_item_not_found_label = associated {$assoc_kind} `{$assoc_ident}` not found
hir_analysis_assoc_item_not_found_other_sugg = `{$qself}` has the following associated {$assoc_kind}
hir_analysis_assoc_item_not_found_similar_in_other_trait_qpath_sugg =
consider fully qualifying{$identically_named ->
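These Fluent hunks rename the message placeholder from `{$assoc_name}` to `{$assoc_ident}`; the diagnostic structs that feed these messages rename their fields the same way in later hunks, because arguments are looked up by name. A toy substitution function (not the real Fluent machinery) to show why the two sides must stay in sync:

```rust
use std::collections::HashMap;

// Toy stand-in for Fluent argument substitution: each `{$var}` placeholder is
// replaced by the argument registered under exactly that key, so renaming the
// placeholder without renaming the field that supplies it leaves it unexpanded.
fn render(template: &str, args: &HashMap<&str, String>) -> String {
    let mut out = template.to_string();
    for (key, value) in args {
        out = out.replace(&format!("{{${key}}}"), value);
    }
    out
}

fn main() {
    let mut args = HashMap::new();
    args.insert("assoc_kind", "type".to_string());
    args.insert("assoc_ident", "Output".to_string());
    args.insert("qself", "F".to_string());
    let msg = "ambiguous associated {$assoc_kind} `{$assoc_ident}` in bounds of `{$qself}`";
    println!("{}", render(msg, &args));
}
```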
@@ -719,7 +719,6 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) {
def_id,
tcx.def_ident_span(def_id).unwrap(),
i.name,
ExternAbi::Rust,
)
}
}

@@ -787,16 +786,6 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) {
for item in items {
let def_id = item.id.owner_id.def_id;

if tcx.has_attr(def_id, sym::rustc_intrinsic) {
intrinsic::check_intrinsic_type(
tcx,
item.id.owner_id.def_id,
item.span,
item.ident.name,
abi,
);
}

let generics = tcx.generics_of(def_id);
let own_counts = generics.own_counts();
if generics.own_params.len() - own_counts.lifetimes != 0 {

@@ -1046,11 +1046,11 @@ fn report_trait_method_mismatch<'tcx>(
// argument pattern and type.
let (sig, body) = tcx.hir_expect_impl_item(impl_m.def_id.expect_local()).expect_fn();
let span = tcx
.hir_body_param_names(body)
.hir_body_param_idents(body)
.zip(sig.decl.inputs.iter())
.map(|(param_name, ty)| {
if let Some(param_name) = param_name {
param_name.span.to(ty.span)
.map(|(param_ident, ty)| {
if let Some(param_ident) = param_ident {
param_ident.span.to(ty.span)
} else {
ty.span
}
@@ -1,9 +1,8 @@
//! Type-checking for the `#[rustc_intrinsic]` intrinsics that the compiler exposes.

use rustc_abi::ExternAbi;
use rustc_errors::codes::*;
use rustc_errors::{DiagMessage, struct_span_code_err};
use rustc_hir::{self as hir, Safety};
use rustc_errors::DiagMessage;
use rustc_hir::{self as hir};
use rustc_middle::bug;
use rustc_middle::traits::{ObligationCause, ObligationCauseCode};
use rustc_middle::ty::{self, Ty, TyCtxt};

@@ -26,17 +25,10 @@ fn equate_intrinsic_type<'tcx>(
sig: ty::PolyFnSig<'tcx>,
) {
let (generics, span) = match tcx.hir_node_by_def_id(def_id) {
hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn { generics, .. }, .. })
| hir::Node::ForeignItem(hir::ForeignItem {
kind: hir::ForeignItemKind::Fn(_, _, generics),
..
}) => (tcx.generics_of(def_id), generics.span),
_ => {
struct_span_code_err!(tcx.dcx(), span, E0622, "intrinsic must be a function")
.with_span_label(span, "expected a function")
.emit();
return;
hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn { generics, .. }, .. }) => {
(tcx.generics_of(def_id), generics.span)
}
_ => tcx.dcx().span_bug(span, "intrinsic must be a function"),
};
let own_counts = generics.own_counts();

@@ -70,13 +62,7 @@ fn equate_intrinsic_type<'tcx>(
}

/// Returns the unsafety of the given intrinsic.
pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hir::Safety {
let has_safe_attr = if tcx.has_attr(intrinsic_id, sym::rustc_intrinsic) {
tcx.fn_sig(intrinsic_id).skip_binder().safety()
} else {
// Old-style intrinsics are never safe
Safety::Unsafe
};
fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hir::Safety {
let is_in_list = match tcx.item_name(intrinsic_id.into()) {
// When adding a new intrinsic to this list,
// it's usually worth updating that intrinsic's documentation

@@ -148,7 +134,7 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -
_ => hir::Safety::Unsafe,
};

if has_safe_attr != is_in_list {
if tcx.fn_sig(intrinsic_id).skip_binder().safety() != is_in_list {
tcx.dcx().struct_span_err(
tcx.def_span(intrinsic_id),
DiagMessage::from(format!(

@@ -163,12 +149,11 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -
/// Remember to add all intrinsics here, in `compiler/rustc_codegen_llvm/src/intrinsic.rs`,
/// and in `library/core/src/intrinsics.rs`.
pub fn check_intrinsic_type(
pub(crate) fn check_intrinsic_type(
tcx: TyCtxt<'_>,
intrinsic_id: LocalDefId,
span: Span,
intrinsic_name: Symbol,
abi: ExternAbi,
) {
let generics = tcx.generics_of(intrinsic_id);
let param = |n| {

@@ -674,8 +659,12 @@ pub fn check_intrinsic_type(
sym::simd_masked_load => (3, 0, vec![param(0), param(1), param(2)], param(2)),
sym::simd_masked_store => (3, 0, vec![param(0), param(1), param(2)], tcx.types.unit),
sym::simd_scatter => (3, 0, vec![param(0), param(1), param(2)], tcx.types.unit),
sym::simd_insert => (2, 0, vec![param(0), tcx.types.u32, param(1)], param(0)),
sym::simd_extract => (2, 0, vec![param(0), tcx.types.u32], param(1)),
sym::simd_insert | sym::simd_insert_dyn => {
(2, 0, vec![param(0), tcx.types.u32, param(1)], param(0))
}
sym::simd_extract | sym::simd_extract_dyn => {
(2, 0, vec![param(0), tcx.types.u32], param(1))
}
sym::simd_cast
| sym::simd_as
| sym::simd_cast_ptr

@@ -706,7 +695,7 @@ pub fn check_intrinsic_type(
};
(n_tps, 0, n_cts, inputs, output, safety)
};
let sig = tcx.mk_fn_sig(inputs, output, false, safety, abi);
let sig = tcx.mk_fn_sig(inputs, output, false, safety, ExternAbi::Rust);
let sig = ty::Binder::bind_with_vars(sig, bound_vars);
equate_intrinsic_type(tcx, span, intrinsic_id, n_tps, n_lts, n_cts, sig)
}
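The intrinsics hunks above delete the old `has_safe_attr` branch and compare the safety declared on the function signature directly against a hard-coded list. A hedged, self-contained model of that consistency check (the entries in `expected_safety` are illustrative, not the compiler's actual table):

```rust
// Toy model: an intrinsic's declared safety must match the allow-list.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Safety {
    Safe,
    Unsafe,
}

fn expected_safety(intrinsic: &str) -> Safety {
    match intrinsic {
        // Hypothetical entries standing in for the real list.
        "abort" | "size_of" | "needs_drop" => Safety::Safe,
        _ => Safety::Unsafe,
    }
}

fn check(intrinsic: &str, declared: Safety) -> Result<(), String> {
    let expected = expected_safety(intrinsic);
    if declared != expected {
        return Err(format!(
            "intrinsic `{intrinsic}` declared {declared:?} but expected {expected:?}"
        ));
    }
    Ok(())
}

fn main() {
    assert!(check("size_of", Safety::Safe).is_ok());
    assert!(check("transmute", Safety::Safe).is_err());
}
```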
@@ -439,9 +439,9 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> {
&self,
span: Span,
def_id: LocalDefId,
assoc_name: Ident,
assoc_ident: Ident,
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
self.tcx.at(span).type_param_predicates((self.item_def_id, def_id, assoc_name))
self.tcx.at(span).type_param_predicates((self.item_def_id, def_id, assoc_ident))
}

fn lower_assoc_shared(
@@ -584,12 +584,12 @@ pub(super) fn explicit_super_predicates_of<'tcx>(
pub(super) fn explicit_supertraits_containing_assoc_item<'tcx>(
tcx: TyCtxt<'tcx>,
(trait_def_id, assoc_name): (DefId, Ident),
(trait_def_id, assoc_ident): (DefId, Ident),
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
implied_predicates_with_filter(
tcx,
trait_def_id,
PredicateFilter::SelfTraitThatDefines(assoc_name),
PredicateFilter::SelfTraitThatDefines(assoc_ident),
)
}

@@ -617,7 +617,7 @@ pub(super) fn implied_predicates_with_filter<'tcx>(
filter: PredicateFilter,
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
let Some(trait_def_id) = trait_def_id.as_local() else {
// if `assoc_name` is None, then the query should've been redirected to an
// if `assoc_ident` is None, then the query should've been redirected to an
// external provider
assert_matches!(filter, PredicateFilter::SelfTraitThatDefines(_));
return tcx.explicit_super_predicates_of(trait_def_id);

@@ -834,11 +834,11 @@ pub(super) fn assert_only_contains_predicates_from<'tcx>(
#[instrument(level = "trace", skip(tcx))]
pub(super) fn type_param_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
(item_def_id, def_id, assoc_name): (LocalDefId, LocalDefId, Ident),
(item_def_id, def_id, assoc_ident): (LocalDefId, LocalDefId, Ident),
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
match tcx.opt_rpitit_info(item_def_id.to_def_id()) {
Some(ty::ImplTraitInTraitData::Trait { opaque_def_id, .. }) => {
return tcx.type_param_predicates((opaque_def_id.expect_local(), def_id, assoc_name));
return tcx.type_param_predicates((opaque_def_id.expect_local(), def_id, assoc_ident));
}
Some(ty::ImplTraitInTraitData::Impl { .. }) => {
unreachable!("should not be lowering bounds on RPITIT in impl")

@@ -863,7 +863,7 @@ pub(super) fn type_param_predicates<'tcx>(
let result = if let Some(parent) = parent {
let icx = ItemCtxt::new(tcx, parent);
icx.probe_ty_param_bounds(DUMMY_SP, def_id, assoc_name)
icx.probe_ty_param_bounds(DUMMY_SP, def_id, assoc_ident)
} else {
ty::EarlyBinder::bind(&[] as &[_])
};

@@ -889,7 +889,7 @@ pub(super) fn type_param_predicates<'tcx>(
let extra_predicates = extend.into_iter().chain(icx.probe_ty_param_bounds_in_generics(
hir_generics,
def_id,
PredicateFilter::SelfTraitThatDefines(assoc_name),
PredicateFilter::SelfTraitThatDefines(assoc_ident),
));

let bounds =

@@ -908,7 +908,7 @@ pub(super) fn type_param_predicates<'tcx>(
_ => unreachable!(),
};
assert_only_contains_predicates_from(
PredicateFilter::SelfTraitThatDefines(assoc_name),
PredicateFilter::SelfTraitThatDefines(assoc_ident),
bounds,
self_ty,
);
@@ -1874,13 +1874,13 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
fn supertrait_hrtb_vars(
tcx: TyCtxt<'tcx>,
def_id: DefId,
assoc_name: Ident,
assoc_ident: Ident,
assoc_kind: ty::AssocKind,
) -> Option<(Vec<ty::BoundVariableKind>, &'tcx ty::AssocItem)> {
let trait_defines_associated_item_named = |trait_def_id: DefId| {
tcx.associated_items(trait_def_id).find_by_name_and_kind(
tcx.associated_items(trait_def_id).find_by_ident_and_kind(
tcx,
assoc_name,
assoc_ident,
assoc_kind,
trait_def_id,
)

@@ -1904,7 +1904,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
if let Some(assoc_item) = trait_defines_associated_item_named(def_id) {
break Some((bound_vars.into_iter().collect(), assoc_item));
}
let predicates = tcx.explicit_supertraits_containing_assoc_item((def_id, assoc_name));
let predicates = tcx.explicit_supertraits_containing_assoc_item((def_id, assoc_ident));
let obligations = predicates.iter_identity_copied().filter_map(|(pred, _)| {
let bound_predicate = pred.kind();
match bound_predicate.skip_binder() {
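`supertrait_hrtb_vars` above keeps walking supertraits until one of them defines the requested associated item, now looked up with `find_by_ident_and_kind`. A self-contained sketch of that search over a toy trait graph (the data and helper below are invented for illustration):

```rust
use std::collections::HashMap;

// trait name -> (supertraits, associated items it defines)
type Traits<'a> = HashMap<&'a str, (Vec<&'a str>, Vec<&'a str>)>;

// Walk the supertrait graph until some trait defines `assoc_ident`.
fn find_defining_trait<'a>(
    traits: &Traits<'a>,
    start: &'a str,
    assoc_ident: &str,
) -> Option<&'a str> {
    let mut stack = vec![start];
    while let Some(name) = stack.pop() {
        if let Some((supers, items)) = traits.get(name) {
            if items.contains(&assoc_ident) {
                return Some(name);
            }
            stack.extend(supers.iter().copied());
        }
    }
    None
}

fn main() {
    let mut traits: Traits = HashMap::new();
    traits.insert("Iterator", (vec![], vec!["Item"]));
    traits.insert("DoubleEndedIterator", (vec!["Iterator"], vec![]));
    assert_eq!(find_defining_trait(&traits, "DoubleEndedIterator", "Item"), Some("Iterator"));
}
```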
@@ -23,7 +23,7 @@ pub(crate) struct AmbiguousAssocItem<'a> {
#[label]
pub span: Span,
pub assoc_kind: &'static str,
pub assoc_name: Ident,
pub assoc_ident: Ident,
pub qself: &'a str,
}

@@ -75,7 +75,7 @@ pub(crate) struct AssocItemIsPrivate {
pub(crate) struct AssocItemNotFound<'a> {
#[primary_span]
pub span: Span,
pub assoc_name: Ident,
pub assoc_ident: Ident,
pub assoc_kind: &'static str,
pub qself: &'a str,
#[subdiagnostic]
@@ -363,10 +363,10 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
for hir_bound in hir_bounds {
// In order to avoid cycles, when we're lowering `SelfTraitThatDefines`,
// we skip over any traits that don't define the given associated type.
if let PredicateFilter::SelfTraitThatDefines(assoc_name) = predicate_filter {
if let PredicateFilter::SelfTraitThatDefines(assoc_ident) = predicate_filter {
if let Some(trait_ref) = hir_bound.trait_ref()
&& let Some(trait_did) = trait_ref.trait_def_id()
&& self.tcx().trait_may_define_assoc_item(trait_did, assoc_name)
&& self.tcx().trait_may_define_assoc_item(trait_did, assoc_ident)
{
// Okay
} else {
@@ -49,13 +49,13 @@ pub(crate) fn validate_cmse_abi<'tcx>(
Ok(Err(index)) => {
// fn(x: u32, u32, u32, u16, y: u16) -> u32,
// ^^^^^^
let span = if let Some(ident) = bare_fn_ty.param_names[index] {
let span = if let Some(ident) = bare_fn_ty.param_idents[index] {
ident.span.to(bare_fn_ty.decl.inputs[index].span)
} else {
bare_fn_ty.decl.inputs[index].span
}
.to(bare_fn_ty.decl.inputs.last().unwrap().span);
let plural = bare_fn_ty.param_names.len() - index != 1;
let plural = bare_fn_ty.param_idents.len() - index != 1;
dcx.emit_err(errors::CmseInputsStackSpill { span, plural, abi });
}
Err(layout_err) => {
@ -117,7 +117,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
all_candidates: impl Fn() -> I,
|
||||
qself: AssocItemQSelf,
|
||||
assoc_kind: ty::AssocKind,
|
||||
assoc_name: Ident,
|
||||
assoc_ident: Ident,
|
||||
span: Span,
|
||||
constraint: Option<&hir::AssocItemConstraint<'tcx>>,
|
||||
) -> ErrorGuaranteed
|
||||
|
@ -129,11 +129,15 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
// First and foremost, provide a more user-friendly & “intuitive” error on kind mismatches.
|
||||
if let Some(assoc_item) = all_candidates().find_map(|r| {
|
||||
tcx.associated_items(r.def_id())
|
||||
.filter_by_name_unhygienic(assoc_name.name)
|
||||
.find(|item| tcx.hygienic_eq(assoc_name, item.ident(tcx), r.def_id()))
|
||||
.filter_by_name_unhygienic(assoc_ident.name)
|
||||
.find(|item| tcx.hygienic_eq(assoc_ident, item.ident(tcx), r.def_id()))
|
||||
}) {
|
||||
return self.complain_about_assoc_kind_mismatch(
|
||||
assoc_item, assoc_kind, assoc_name, span, constraint,
|
||||
assoc_item,
|
||||
assoc_kind,
|
||||
assoc_ident,
|
||||
span,
|
||||
constraint,
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -142,18 +146,18 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
|
||||
// The fallback span is needed because `assoc_name` might be an `Fn()`'s `Output` without a
|
||||
// valid span, so we point at the whole path segment instead.
|
||||
let is_dummy = assoc_name.span == DUMMY_SP;
|
||||
let is_dummy = assoc_ident.span == DUMMY_SP;
|
||||
|
||||
let mut err = errors::AssocItemNotFound {
|
||||
span: if is_dummy { span } else { assoc_name.span },
|
||||
assoc_name,
|
||||
span: if is_dummy { span } else { assoc_ident.span },
|
||||
assoc_ident,
|
||||
assoc_kind: assoc_kind_str,
|
||||
qself: &qself_str,
|
||||
label: None,
|
||||
sugg: None,
|
||||
// Try to get the span of the identifier within the path's syntax context
|
||||
// (if that's different).
|
||||
within_macro_span: assoc_name.span.within_macro(span, tcx.sess.source_map()),
|
||||
within_macro_span: assoc_ident.span.within_macro(span, tcx.sess.source_map()),
|
||||
};
|
||||
|
||||
if is_dummy {
|
||||
|
@ -169,10 +173,10 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
.collect();
|
||||
|
||||
if let Some(suggested_name) =
|
||||
find_best_match_for_name(&all_candidate_names, assoc_name.name, None)
|
||||
find_best_match_for_name(&all_candidate_names, assoc_ident.name, None)
|
||||
{
|
||||
err.sugg = Some(errors::AssocItemNotFoundSugg::Similar {
|
||||
span: assoc_name.span,
|
||||
span: assoc_ident.span,
|
||||
assoc_kind: assoc_kind_str,
|
||||
suggested_name,
|
||||
});
|
||||
|
@ -201,7 +205,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
.collect();
|
||||
|
||||
if let Some(suggested_name) =
|
||||
find_best_match_for_name(&wider_candidate_names, assoc_name.name, None)
|
||||
find_best_match_for_name(&wider_candidate_names, assoc_ident.name, None)
|
||||
{
|
||||
if let [best_trait] = visible_traits
|
||||
.iter()
|
||||
|
@ -215,11 +219,11 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
{
|
||||
let trait_name = tcx.def_path_str(best_trait);
|
||||
err.label = Some(errors::AssocItemNotFoundLabel::FoundInOtherTrait {
|
||||
span: assoc_name.span,
|
||||
span: assoc_ident.span,
|
||||
assoc_kind: assoc_kind_str,
|
||||
trait_name: &trait_name,
|
||||
suggested_name,
|
||||
identically_named: suggested_name == assoc_name.name,
|
||||
identically_named: suggested_name == assoc_ident.name,
|
||||
});
|
||||
if let AssocItemQSelf::TyParam(ty_param_def_id, ty_param_span) = qself
|
||||
// Not using `self.item_def_id()` here as that would yield the opaque type itself if we're
|
||||
|
@ -246,7 +250,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
// The type param already has a bound for `trait_name`, we just need to
|
||||
// change the associated item.
|
||||
err.sugg = Some(errors::AssocItemNotFoundSugg::SimilarInOtherTrait {
|
||||
span: assoc_name.span,
|
||||
span: assoc_ident.span,
|
||||
assoc_kind: assoc_kind_str,
|
||||
suggested_name,
|
||||
});
|
||||
|
@ -265,7 +269,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
Applicability::MaybeIncorrect
|
||||
};
|
||||
|
||||
let identically_named = suggested_name == assoc_name.name;
|
||||
let identically_named = suggested_name == assoc_ident.name;
|
||||
|
||||
if let DefKind::TyAlias = tcx.def_kind(item_def_id)
|
||||
&& !tcx.type_alias_is_lazy(item_def_id)
|
||||
|
@ -273,7 +277,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
err.sugg = Some(errors::AssocItemNotFoundSugg::SimilarInOtherTraitQPath {
|
||||
lo: ty_param_span.shrink_to_lo(),
|
||||
mi: ty_param_span.shrink_to_hi(),
|
||||
hi: (!identically_named).then_some(assoc_name.span),
|
||||
hi: (!identically_named).then_some(assoc_ident.span),
|
||||
trait_ref,
|
||||
identically_named,
|
||||
suggested_name,
|
||||
|
@ -294,7 +298,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
// We suggested constraining a type parameter, but the associated item on it
|
||||
// was also not an exact match, so we also suggest changing it.
|
||||
err.span_suggestion_verbose(
|
||||
assoc_name.span,
|
||||
assoc_ident.span,
|
||||
fluent::hir_analysis_assoc_item_not_found_similar_in_other_trait_with_bound_sugg,
|
||||
suggested_name,
|
||||
Applicability::MaybeIncorrect,
|
||||
|
@ -311,13 +315,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
// suggest using it.
|
||||
if let [candidate_name] = all_candidate_names.as_slice() {
|
||||
err.sugg = Some(errors::AssocItemNotFoundSugg::Other {
|
||||
span: assoc_name.span,
|
||||
span: assoc_ident.span,
|
||||
qself: &qself_str,
|
||||
assoc_kind: assoc_kind_str,
|
||||
suggested_name: *candidate_name,
|
||||
});
|
||||
} else {
|
||||
err.label = Some(errors::AssocItemNotFoundLabel::NotFound { span: assoc_name.span });
|
||||
err.label = Some(errors::AssocItemNotFoundLabel::NotFound { span: assoc_ident.span });
|
||||
}
|
||||
|
||||
self.dcx().emit_err(err)
|
||||
|
@ -805,7 +809,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
return None;
|
||||
};
|
||||
|
||||
let assoc_item = tcx.associated_items(trait_def).find_by_name_and_kind(
|
||||
let assoc_item = tcx.associated_items(trait_def).find_by_ident_and_kind(
|
||||
tcx,
|
||||
ident,
|
||||
ty::AssocKind::Type,
|
||||
|
|
|
@@ -147,7 +147,7 @@ pub trait HirTyLowerer<'tcx> {
&self,
span: Span,
def_id: LocalDefId,
assoc_name: Ident,
assoc_ident: Ident,
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]>;

/// Lower an associated type/const (from a trait) to a projection.
@ -933,11 +933,11 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
&self,
|
||||
trait_def_id: DefId,
|
||||
assoc_kind: ty::AssocKind,
|
||||
assoc_name: Ident,
|
||||
assoc_ident: Ident,
|
||||
) -> bool {
|
||||
self.tcx()
|
||||
.associated_items(trait_def_id)
|
||||
.find_by_name_and_kind(self.tcx(), assoc_name, assoc_kind, trait_def_id)
|
||||
.find_by_ident_and_kind(self.tcx(), assoc_ident, assoc_kind, trait_def_id)
|
||||
.is_some()
|
||||
}
|
||||
|
||||
|
@ -964,7 +964,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
}
|
||||
|
||||
/// Search for a trait bound on a type parameter whose trait defines the associated item
|
||||
/// given by `assoc_name` and `kind`.
|
||||
/// given by `assoc_ident` and `kind`.
|
||||
///
|
||||
/// This fails if there is no such bound in the list of candidates or if there are multiple
|
||||
/// candidates in which case it reports ambiguity.
|
||||
|
@ -976,13 +976,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
ty_param_def_id: LocalDefId,
|
||||
ty_param_span: Span,
|
||||
kind: ty::AssocKind,
|
||||
assoc_name: Ident,
|
||||
assoc_ident: Ident,
|
||||
span: Span,
|
||||
) -> Result<ty::PolyTraitRef<'tcx>, ErrorGuaranteed> {
|
||||
debug!(?ty_param_def_id, ?assoc_name, ?span);
|
||||
debug!(?ty_param_def_id, ?assoc_ident, ?span);
|
||||
let tcx = self.tcx();
|
||||
|
||||
let predicates = &self.probe_ty_param_bounds(span, ty_param_def_id, assoc_name);
|
||||
let predicates = &self.probe_ty_param_bounds(span, ty_param_def_id, assoc_ident);
|
||||
debug!("predicates={:#?}", predicates);
|
||||
|
||||
self.probe_single_bound_for_assoc_item(
|
||||
|
@ -990,17 +990,18 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
let trait_refs = predicates
|
||||
.iter_identity_copied()
|
||||
.filter_map(|(p, _)| Some(p.as_trait_clause()?.map_bound(|t| t.trait_ref)));
|
||||
traits::transitive_bounds_that_define_assoc_item(tcx, trait_refs, assoc_name)
|
||||
traits::transitive_bounds_that_define_assoc_item(tcx, trait_refs, assoc_ident)
|
||||
},
|
||||
AssocItemQSelf::TyParam(ty_param_def_id, ty_param_span),
|
||||
kind,
|
||||
assoc_name,
|
||||
assoc_ident,
|
||||
span,
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
/// Search for a single trait bound whose trait defines the associated item given by `assoc_name`.
|
||||
/// Search for a single trait bound whose trait defines the associated item given by
|
||||
/// `assoc_ident`.
|
||||
///
|
||||
/// This fails if there is no such bound in the list of candidates or if there are multiple
|
||||
/// candidates in which case it reports ambiguity.
|
||||
|
@ -1010,7 +1011,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
all_candidates: impl Fn() -> I,
|
||||
qself: AssocItemQSelf,
|
||||
assoc_kind: ty::AssocKind,
|
||||
assoc_name: Ident,
|
||||
assoc_ident: Ident,
|
||||
span: Span,
|
||||
constraint: Option<&hir::AssocItemConstraint<'tcx>>,
|
||||
) -> Result<ty::PolyTraitRef<'tcx>, ErrorGuaranteed>
|
||||
|
@ -1020,7 +1021,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
let tcx = self.tcx();
|
||||
|
||||
let mut matching_candidates = all_candidates().filter(|r| {
|
||||
self.probe_trait_that_defines_assoc_item(r.def_id(), assoc_kind, assoc_name)
|
||||
self.probe_trait_that_defines_assoc_item(r.def_id(), assoc_kind, assoc_ident)
|
||||
});
|
||||
|
||||
let Some(bound) = matching_candidates.next() else {
|
||||
|
@ -1028,7 +1029,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
all_candidates,
|
||||
qself,
|
||||
assoc_kind,
|
||||
assoc_name,
|
||||
assoc_ident,
|
||||
span,
|
||||
constraint,
|
||||
);
|
||||
|
@ -1044,7 +1045,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
let mut err = self.dcx().create_err(crate::errors::AmbiguousAssocItem {
|
||||
span,
|
||||
assoc_kind: assoc_kind_str,
|
||||
assoc_name,
|
||||
assoc_ident,
|
||||
qself: &qself_str,
|
||||
});
|
||||
// Provide a more specific error code index entry for equality bindings.
|
||||
|
@ -1065,13 +1066,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
let bound_id = bound.def_id();
|
||||
let bound_span = tcx
|
||||
.associated_items(bound_id)
|
||||
.find_by_name_and_kind(tcx, assoc_name, assoc_kind, bound_id)
|
||||
.find_by_ident_and_kind(tcx, assoc_ident, assoc_kind, bound_id)
|
||||
.and_then(|item| tcx.hir_span_if_local(item.def_id));
|
||||
|
||||
if let Some(bound_span) = bound_span {
|
||||
err.span_label(
|
||||
bound_span,
|
||||
format!("ambiguous `{assoc_name}` from `{}`", bound.print_trait_sugared(),),
|
||||
format!("ambiguous `{assoc_ident}` from `{}`", bound.print_trait_sugared(),),
|
||||
);
|
||||
if let Some(constraint) = constraint {
|
||||
match constraint.kind {
|
||||
|
@ -1087,7 +1088,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
}
|
||||
// FIXME(#97583): This isn't syntactically well-formed!
|
||||
where_bounds.push(format!(
|
||||
" T: {trait}::{assoc_name} = {term}",
|
||||
" T: {trait}::{assoc_ident} = {term}",
|
||||
trait = bound.print_only_trait_path(),
|
||||
));
|
||||
}
|
||||
|
@ -1096,7 +1097,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
}
|
||||
} else {
|
||||
err.span_suggestion_verbose(
|
||||
span.with_hi(assoc_name.span.lo()),
|
||||
span.with_hi(assoc_ident.span.lo()),
|
||||
"use fully-qualified syntax to disambiguate",
|
||||
format!("<{qself_str} as {}>::", bound.print_only_trait_path()),
|
||||
Applicability::MaybeIncorrect,
|
||||
|
@ -1104,7 +1105,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
}
|
||||
} else {
|
||||
err.note(format!(
|
||||
"associated {assoc_kind_str} `{assoc_name}` could derive from `{}`",
|
||||
"associated {assoc_kind_str} `{assoc_ident}` could derive from `{}`",
|
||||
bound.print_only_trait_path(),
|
||||
));
|
||||
}
|
||||
|
@ -2858,7 +2859,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
|||
|
||||
let trait_ref = self.lower_impl_trait_ref(i.of_trait.as_ref()?, self.lower_ty(i.self_ty));
|
||||
|
||||
let assoc = tcx.associated_items(trait_ref.def_id).find_by_name_and_kind(
|
||||
let assoc = tcx.associated_items(trait_ref.def_id).find_by_ident_and_kind(
|
||||
tcx,
|
||||
*ident,
|
||||
ty::AssocKind::Fn,
|
||||
|
|
|
@@ -397,7 +397,7 @@ impl<'a> State<'a> {
self.pclose();
}
hir::TyKind::BareFn(f) => {
self.print_ty_fn(f.abi, f.safety, f.decl, None, f.generic_params, f.param_names);
self.print_ty_fn(f.abi, f.safety, f.decl, None, f.generic_params, f.param_idents);
}
hir::TyKind::UnsafeBinder(unsafe_binder) => {
self.print_unsafe_binder(unsafe_binder);
@ -473,14 +473,14 @@ impl<'a> State<'a> {
|
|||
self.maybe_print_comment(item.span.lo());
|
||||
self.print_attrs_as_outer(self.attrs(item.hir_id()));
|
||||
match item.kind {
|
||||
hir::ForeignItemKind::Fn(sig, arg_names, generics) => {
|
||||
hir::ForeignItemKind::Fn(sig, arg_idents, generics) => {
|
||||
self.head("");
|
||||
self.print_fn(
|
||||
sig.decl,
|
||||
sig.header,
|
||||
Some(item.ident.name),
|
||||
generics,
|
||||
arg_names,
|
||||
arg_idents,
|
||||
None,
|
||||
);
|
||||
self.end(); // end head-ibox
|
||||
|
@ -899,10 +899,10 @@ impl<'a> State<'a> {
|
|||
ident: Ident,
|
||||
m: &hir::FnSig<'_>,
|
||||
generics: &hir::Generics<'_>,
|
||||
arg_names: &[Option<Ident>],
|
||||
arg_idents: &[Option<Ident>],
|
||||
body_id: Option<hir::BodyId>,
|
||||
) {
|
||||
self.print_fn(m.decl, m.header, Some(ident.name), generics, arg_names, body_id);
|
||||
self.print_fn(m.decl, m.header, Some(ident.name), generics, arg_idents, body_id);
|
||||
}
|
||||
|
||||
fn print_trait_item(&mut self, ti: &hir::TraitItem<'_>) {
|
||||
|
@ -914,8 +914,8 @@ impl<'a> State<'a> {
|
|||
hir::TraitItemKind::Const(ty, default) => {
|
||||
self.print_associated_const(ti.ident, ti.generics, ty, default);
|
||||
}
|
||||
hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(arg_names)) => {
|
||||
self.print_method_sig(ti.ident, sig, ti.generics, arg_names, None);
|
||||
hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(arg_idents)) => {
|
||||
self.print_method_sig(ti.ident, sig, ti.generics, arg_idents, None);
|
||||
self.word(";");
|
||||
}
|
||||
hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => {
|
||||
|
@ -2122,7 +2122,7 @@ impl<'a> State<'a> {
|
|||
header: hir::FnHeader,
|
||||
name: Option<Symbol>,
|
||||
generics: &hir::Generics<'_>,
|
||||
arg_names: &[Option<Ident>],
|
||||
arg_idents: &[Option<Ident>],
|
||||
body_id: Option<hir::BodyId>,
|
||||
) {
|
||||
self.print_fn_header_info(header);
|
||||
|
@ -2134,16 +2134,16 @@ impl<'a> State<'a> {
|
|||
self.print_generic_params(generics.params);
|
||||
|
||||
self.popen();
|
||||
// Make sure we aren't supplied *both* `arg_names` and `body_id`.
|
||||
assert!(arg_names.is_empty() || body_id.is_none());
|
||||
// Make sure we aren't supplied *both* `arg_idents` and `body_id`.
|
||||
assert!(arg_idents.is_empty() || body_id.is_none());
|
||||
let mut i = 0;
|
||||
let mut print_arg = |s: &mut Self, ty: Option<&hir::Ty<'_>>| {
|
||||
if i == 0 && decl.implicit_self.has_implicit_self() {
|
||||
s.print_implicit_self(&decl.implicit_self);
|
||||
} else {
|
||||
if let Some(arg_name) = arg_names.get(i) {
|
||||
if let Some(arg_name) = arg_name {
|
||||
s.word(arg_name.to_string());
|
||||
if let Some(arg_ident) = arg_idents.get(i) {
|
||||
if let Some(arg_ident) = arg_ident {
|
||||
s.word(arg_ident.to_string());
|
||||
s.word(":");
|
||||
s.space();
|
||||
}
|
||||
|
@ -2452,7 +2452,7 @@ impl<'a> State<'a> {
|
|||
decl: &hir::FnDecl<'_>,
|
||||
name: Option<Symbol>,
|
||||
generic_params: &[hir::GenericParam<'_>],
|
||||
arg_names: &[Option<Ident>],
|
||||
arg_idents: &[Option<Ident>],
|
||||
) {
|
||||
self.ibox(INDENT_UNIT);
|
||||
self.print_formal_generic_params(generic_params);
|
||||
|
@ -2467,7 +2467,7 @@ impl<'a> State<'a> {
|
|||
},
|
||||
name,
|
||||
generics,
|
||||
arg_names,
|
||||
arg_idents,
|
||||
None,
|
||||
);
|
||||
self.end();
|
||||
|
|
|
@@ -148,7 +148,7 @@ hir_typeck_never_type_fallback_flowing_into_unsafe_path = never type fallback af
hir_typeck_never_type_fallback_flowing_into_unsafe_union_field = never type fallback affects this union access
.help = specify the type explicitly

hir_typeck_no_associated_item = no {$item_kind} named `{$item_name}` found for {$ty_prefix} `{$ty_str}`{$trait_missing_method ->
hir_typeck_no_associated_item = no {$item_kind} named `{$item_ident}` found for {$ty_prefix} `{$ty_str}`{$trait_missing_method ->
[true] {""}
*[other] {" "}in the current scope
}

@@ -727,7 +727,7 @@ pub(crate) struct NoAssociatedItem {
#[primary_span]
pub span: Span,
pub item_kind: &'static str,
pub item_name: Ident,
pub item_ident: Ident,
pub ty_prefix: Cow<'static, str>,
pub ty_str: String,
pub trait_missing_method: bool,
@@ -2920,8 +2920,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
// We failed to check the expression, report an error.

// Emits an error if we deref an infer variable, like calling `.field` on a base type of &_.
self.structurally_resolve_type(autoderef.span(), autoderef.final_ty(false));
// Emits an error if we deref an infer variable, like calling `.field` on a base type
// of `&_`. We can also use this to suppress unnecessary "missing field" errors that
// will follow ambiguity errors.
let final_ty = self.structurally_resolve_type(autoderef.span(), autoderef.final_ty(false));
if let ty::Error(_) = final_ty.kind() {
return final_ty;
}

if let Some((adjustments, did)) = private_candidate {
// (#90483) apply adjustments to avoid ExprUseVisitor from
@ -1136,7 +1136,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
&& let self_implicit =
|
||||
matches!(call_expr.kind, hir::ExprKind::MethodCall(..)) as usize
|
||||
&& let Some(Some(arg)) =
|
||||
self.tcx.fn_arg_names(fn_def_id).get(expected_idx.as_usize() + self_implicit)
|
||||
self.tcx.fn_arg_idents(fn_def_id).get(expected_idx.as_usize() + self_implicit)
|
||||
&& arg.name != kw::SelfLower
|
||||
{
|
||||
format!("/* {} */", arg.name)
|
||||
|
@ -2619,7 +2619,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
is_method: bool,
|
||||
) -> Option<(IndexVec<ExpectedIdx, (Option<GenericIdx>, FnParam<'_>)>, &hir::Generics<'_>)>
|
||||
{
|
||||
let (sig, generics, body_id, param_names) = match self.tcx.hir_get_if_local(def_id)? {
|
||||
let (sig, generics, body_id, params) = match self.tcx.hir_get_if_local(def_id)? {
|
||||
hir::Node::TraitItem(&hir::TraitItem {
|
||||
generics,
|
||||
kind: hir::TraitItemKind::Fn(sig, trait_fn),
|
||||
|
@ -2661,7 +2661,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
None
|
||||
}
|
||||
});
|
||||
match (body_id, param_names) {
|
||||
match (body_id, params) {
|
||||
(Some(_), Some(_)) | (None, None) => unreachable!(),
|
||||
(Some(body), None) => {
|
||||
let params = self.tcx.hir_body(body).params;
|
||||
|
@ -2678,7 +2678,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
params.get(is_method as usize..params.len() - sig.decl.c_variadic as usize)?;
|
||||
debug_assert_eq!(params.len(), fn_inputs.len());
|
||||
Some((
|
||||
fn_inputs.zip(params.iter().map(|&ident| FnParam::Name(ident))).collect(),
|
||||
fn_inputs.zip(params.iter().map(|&ident| FnParam::Ident(ident))).collect(),
|
||||
generics,
|
||||
))
|
||||
}
|
||||
|
@@ -2709,14 +2709,14 @@ impl<'tcx> Visitor<'tcx> for FindClosureArg<'tcx> {
#[derive(Clone, Copy)]
enum FnParam<'hir> {
Param(&'hir hir::Param<'hir>),
Name(Option<Ident>),
Ident(Option<Ident>),
}

impl FnParam<'_> {
fn span(&self) -> Span {
match self {
Self::Param(param) => param.span,
Self::Name(ident) => {
Self::Ident(ident) => {
if let Some(ident) = ident {
ident.span
} else {

@@ -2738,7 +2738,7 @@ impl FnParam<'_> {
{
Some(ident.name)
}
FnParam::Name(ident)
FnParam::Ident(ident)
if let Some(ident) = ident
&& ident.name != kw::Underscore =>
{
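The hunks above rename `FnParam::Name` to `FnParam::Ident` to match the wider name-to-ident cleanup; the display logic still falls back to a positional label when no usable identifier exists. A toy version of that fallback (plain strings instead of HIR nodes, purely illustrative):

```rust
// Toy mirror of the renamed helper: a parameter either comes from a body
// (`Param`) or is just an optional identifier from a bodyless signature.
enum FnParam {
    Param(String),
    Ident(Option<String>),
}

impl FnParam {
    // Fall back to a positional description when there is no usable name.
    fn display(&self, idx: usize) -> String {
        match self {
            FnParam::Param(name) if !name.is_empty() => format!("`{name}`"),
            FnParam::Ident(Some(name)) if name != "_" => format!("`{name}`"),
            _ => format!("parameter #{}", idx + 1),
        }
    }
}

fn main() {
    assert_eq!(FnParam::Ident(Some("count".into())).display(0), "`count`");
    assert_eq!(FnParam::Ident(None).display(2), "parameter #3");
}
```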
@ -534,12 +534,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
Ok((def_kind, pick.item.def_id))
|
||||
}
|
||||
|
||||
/// Finds item with name `item_name` defined in impl/trait `def_id`
|
||||
/// Finds item with name `item_ident` defined in impl/trait `def_id`
|
||||
/// and return it, or `None`, if no such item was defined there.
|
||||
fn associated_value(&self, def_id: DefId, item_name: Ident) -> Option<ty::AssocItem> {
|
||||
fn associated_value(&self, def_id: DefId, item_ident: Ident) -> Option<ty::AssocItem> {
|
||||
self.tcx
|
||||
.associated_items(def_id)
|
||||
.find_by_name_and_namespace(self.tcx, item_name, Namespace::ValueNS, def_id)
|
||||
.find_by_ident_and_namespace(self.tcx, item_ident, Namespace::ValueNS, def_id)
|
||||
.copied()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -585,7 +585,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
&self,
|
||||
mut span: Span,
|
||||
rcvr_ty: Ty<'tcx>,
|
||||
item_name: Ident,
|
||||
item_ident: Ident,
|
||||
expr_id: hir::HirId,
|
||||
source: SelfSource<'tcx>,
|
||||
args: Option<&'tcx [hir::Expr<'tcx>]>,
|
||||
|
@ -616,7 +616,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
} else if rcvr_ty.is_enum() {
|
||||
"variant or associated item"
|
||||
} else {
|
||||
match (item_name.as_str().chars().next(), rcvr_ty.is_fresh_ty()) {
|
||||
match (item_ident.as_str().chars().next(), rcvr_ty.is_fresh_ty()) {
|
||||
(Some(name), false) if name.is_lowercase() => "function or associated item",
|
||||
(Some(_), false) => "associated item",
|
||||
(Some(_), true) | (None, false) => "variant or associated item",
|
||||
|
@ -631,7 +631,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
rcvr_ty,
|
||||
source,
|
||||
span,
|
||||
item_name,
|
||||
item_ident,
|
||||
&short_ty_str,
|
||||
&mut ty_file,
|
||||
) {
|
||||
|
@ -643,13 +643,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
source,
|
||||
span,
|
||||
item_kind,
|
||||
item_name,
|
||||
item_ident,
|
||||
&short_ty_str,
|
||||
&mut ty_file,
|
||||
) {
|
||||
return guar;
|
||||
}
|
||||
span = item_name.span;
|
||||
span = item_ident.span;
|
||||
|
||||
// Don't show generic arguments when the method can't be found in any implementation (#81576).
|
||||
let mut ty_str_reported = ty_str.clone();
|
||||
|
@ -661,7 +661,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
self.tcx
|
||||
.inherent_impls(adt_def.did())
|
||||
.into_iter()
|
||||
.any(|def_id| self.associated_value(*def_id, item_name).is_some())
|
||||
.any(|def_id| self.associated_value(*def_id, item_ident).is_some())
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
@ -678,14 +678,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
let is_write = sugg_span.ctxt().outer_expn_data().macro_def_id.is_some_and(|def_id| {
|
||||
tcx.is_diagnostic_item(sym::write_macro, def_id)
|
||||
|| tcx.is_diagnostic_item(sym::writeln_macro, def_id)
|
||||
}) && item_name.name == sym::write_fmt;
|
||||
}) && item_ident.name == sym::write_fmt;
|
||||
let mut err = if is_write && let SelfSource::MethodCall(rcvr_expr) = source {
|
||||
self.suggest_missing_writer(rcvr_ty, rcvr_expr)
|
||||
} else {
|
||||
let mut err = self.dcx().create_err(NoAssociatedItem {
|
||||
span,
|
||||
item_kind,
|
||||
item_name,
|
||||
item_ident,
|
||||
ty_prefix: if trait_missing_method {
|
||||
// FIXME(mu001999) E0599 maybe not suitable here because it is for types
|
||||
Cow::from("trait")
|
||||
|
@ -699,7 +699,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
if is_method {
|
||||
self.suggest_use_shadowed_binding_with_method(
|
||||
source,
|
||||
item_name,
|
||||
item_ident,
|
||||
&ty_str_reported,
|
||||
&mut err,
|
||||
);
|
||||
|
@ -710,9 +710,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
&& let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind
|
||||
&& let Res::SelfTyAlias { alias_to: impl_def_id, .. } = path.res
|
||||
&& let DefKind::Impl { .. } = self.tcx.def_kind(impl_def_id)
|
||||
&& let Some(candidate) = tcx.associated_items(impl_def_id).find_by_name_and_kind(
|
||||
&& let Some(candidate) = tcx.associated_items(impl_def_id).find_by_ident_and_kind(
|
||||
self.tcx,
|
||||
item_name,
|
||||
item_ident,
|
||||
ty::AssocKind::Type,
|
||||
impl_def_id,
|
||||
)
|
||||
|
@ -722,7 +722,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
{
|
||||
let def_path = tcx.def_path_str(adt_def.did());
|
||||
err.span_suggestion(
|
||||
ty.span.to(item_name.span),
|
||||
ty.span.to(item_ident.span),
|
||||
format!("to construct a value of type `{}`, use the explicit path", def_path),
|
||||
def_path,
|
||||
Applicability::MachineApplicable,
|
||||
|
@ -750,7 +750,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
self.find_builder_fn(&mut err, rcvr_ty, expr_id);
|
||||
}
|
||||
|
||||
if tcx.ty_is_opaque_future(rcvr_ty) && item_name.name == sym::poll {
|
||||
if tcx.ty_is_opaque_future(rcvr_ty) && item_ident.name == sym::poll {
|
||||
err.help(format!(
|
||||
"method `poll` found on `Pin<&mut {ty_str}>`, \
|
||||
see documentation for `std::pin::Pin`"
|
||||
|
@ -765,7 +765,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
{
|
||||
self.suggest_await_before_method(
|
||||
&mut err,
|
||||
item_name,
|
||||
item_ident,
|
||||
rcvr_ty,
|
||||
cal,
|
||||
span,
|
||||
|
@ -787,7 +787,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
if let SelfSource::MethodCall(rcvr_expr) = source
|
||||
&& let ty::RawPtr(ty, ptr_mutbl) = *rcvr_ty.kind()
|
||||
&& let Ok(pick) = self.lookup_probe_for_diagnostic(
|
||||
item_name,
|
||||
item_ident,
|
||||
Ty::new_ref(tcx, ty::Region::new_error_misc(tcx), ty, ptr_mutbl),
|
||||
self.tcx.hir_expect_expr(self.tcx.parent_hir_id(rcvr_expr.hir_id)),
|
||||
ProbeScope::TraitsInScope,
|
||||
|
@ -808,7 +808,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
};
|
||||
err.span_note(
|
||||
tcx.def_span(pick.item.def_id),
|
||||
format!("the method `{item_name}` exists on the type `{ty}`", ty = pick.self_ty),
|
||||
format!("the method `{item_ident}` exists on the type `{ty}`", ty = pick.self_ty),
|
||||
);
|
||||
let mut_str = ptr_mutbl.ptr_str();
|
||||
err.note(format!(
|
||||
|
@ -834,7 +834,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
self.suggest_fn_call(&mut err, rcvr_expr, rcvr_ty, |output_ty| {
|
||||
let call_expr = self.tcx.hir_expect_expr(self.tcx.parent_hir_id(rcvr_expr.hir_id));
|
||||
let probe = self.lookup_probe_for_diagnostic(
|
||||
item_name,
|
||||
item_ident,
|
||||
output_ty,
|
||||
call_expr,
|
||||
ProbeScope::AllTraits,
|
||||
|
@ -873,13 +873,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
static_candidates,
|
||||
rcvr_ty,
|
||||
source,
|
||||
item_name,
|
||||
item_ident,
|
||||
args,
|
||||
sugg_span,
|
||||
);
|
||||
self.note_candidates_on_method_error(
|
||||
rcvr_ty,
|
||||
item_name,
|
||||
item_ident,
|
||||
source,
|
||||
args,
|
||||
span,
|
||||
|
@ -890,7 +890,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
} else if static_candidates.len() > 1 {
|
||||
self.note_candidates_on_method_error(
|
||||
rcvr_ty,
|
||||
item_name,
|
||||
item_ident,
|
||||
source,
|
||||
args,
|
||||
span,
|
||||
|
@ -904,7 +904,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
let mut restrict_type_params = false;
|
||||
let mut suggested_derive = false;
|
||||
let mut unsatisfied_bounds = false;
|
||||
if item_name.name == sym::count && self.is_slice_ty(rcvr_ty, span) {
|
||||
if item_ident.name == sym::count && self.is_slice_ty(rcvr_ty, span) {
|
||||
let msg = "consider using `len` instead";
|
||||
if let SelfSource::MethodCall(_expr) = source {
|
||||
err.span_suggestion_short(span, msg, "len", Applicability::MachineApplicable);
|
||||
|
@ -1349,7 +1349,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
};
|
||||
let primary_message = primary_message.unwrap_or_else(|| {
|
||||
format!(
|
||||
"the {item_kind} `{item_name}` exists for {actual_prefix} `{ty_str}`, \
|
||||
"the {item_kind} `{item_ident}` exists for {actual_prefix} `{ty_str}`, \
|
||||
but its trait bounds were not satisfied"
|
||||
)
|
||||
});
|
||||
|
@ -1379,7 +1379,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
// `Pin<&Self>`.
|
||||
if targs.len() == 1 {
|
||||
let mut item_segment = hir::PathSegment::invalid();
|
||||
item_segment.ident = item_name;
|
||||
item_segment.ident = item_ident;
|
||||
for t in [Ty::new_mut_ref, Ty::new_imm_ref, |_, _, t| t] {
|
||||
let new_args =
|
||||
tcx.mk_args_from_iter(targs.iter().map(|arg| match arg.as_type() {
|
||||
|
@ -1423,9 +1423,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
ty::Adt(adt, _) => self.tcx.is_lang_item(adt.did(), LangItem::String),
|
||||
_ => false,
|
||||
};
|
||||
if is_string_or_ref_str && item_name.name == sym::iter {
|
||||
if is_string_or_ref_str && item_ident.name == sym::iter {
|
||||
err.span_suggestion_verbose(
|
||||
item_name.span,
|
||||
item_ident.span,
|
||||
"because of the in-memory representation of `&str`, to obtain \
|
||||
an `Iterator` over each of its codepoint use method `chars`",
|
||||
"chars",
|
||||
|
@ -1439,7 +1439,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
.into_iter()
|
||||
.copied()
|
||||
.filter(|def_id| {
|
||||
if let Some(assoc) = self.associated_value(*def_id, item_name) {
|
||||
if let Some(assoc) = self.associated_value(*def_id, item_ident) {
|
||||
// Check for both mode is the same so we avoid suggesting
|
||||
// incorrect associated item.
|
||||
match (mode, assoc.fn_has_self_parameter, source) {
|
||||
|
@ -1500,7 +1500,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
// If the method name is the name of a field with a function or closure type,
|
||||
// give a helping note that it has to be called as `(x.f)(...)`.
|
||||
if let SelfSource::MethodCall(expr) = source {
|
||||
if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_name, &mut err)
|
||||
if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_ident, &mut err)
|
||||
&& similar_candidate.is_none()
|
||||
&& !custom_span_label
|
||||
{
|
||||
|
@ -1513,7 +1513,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
let confusable_suggested = self.confusable_method_name(
|
||||
&mut err,
|
||||
rcvr_ty,
|
||||
item_name,
|
||||
item_ident,
|
||||
args.map(|args| {
|
||||
args.iter()
|
||||
.map(|expr| {
|
||||
|
@ -1531,12 +1531,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
source,
|
||||
span,
|
||||
rcvr_ty,
|
||||
item_name,
|
||||
item_ident,
|
||||
expected.only_has_type(self),
|
||||
);
|
||||
}
|
||||
|
||||
self.suggest_unwrapping_inner_self(&mut err, source, rcvr_ty, item_name);
|
||||
self.suggest_unwrapping_inner_self(&mut err, source, rcvr_ty, item_ident);
|
||||
|
||||
for (span, mut bounds) in bound_spans {
|
||||
if !tcx.sess.source_map().is_span_accessible(span) {
|
||||
|
@ -1547,7 +1547,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
let pre = if Some(span) == ty_span {
|
||||
ty_span.take();
|
||||
format!(
|
||||
"{item_kind} `{item_name}` not found for this {} because it ",
|
||||
"{item_kind} `{item_ident}` not found for this {} because it ",
|
||||
rcvr_ty.prefix_string(self.tcx)
|
||||
)
|
||||
} else {
|
||||
|
@ -1567,7 +1567,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
err.span_label(
|
||||
span,
|
||||
format!(
|
||||
"{item_kind} `{item_name}` not found for this {}",
|
||||
"{item_kind} `{item_ident}` not found for this {}",
|
||||
rcvr_ty.prefix_string(self.tcx)
|
||||
),
|
||||
);
|
||||
|
@ -1579,7 +1579,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
&mut err,
|
||||
span,
|
||||
rcvr_ty,
|
||||
item_name,
|
||||
item_ident,
|
||||
args.map(|args| args.len() + 1),
|
||||
source,
|
||||
no_match_data.out_of_scope_traits.clone(),
|
||||
|
@ -1596,7 +1596,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
let adt_def = rcvr_ty.ty_adt_def().expect("enum is not an ADT");
|
||||
if let Some(var_name) = edit_distance::find_best_match_for_name(
|
||||
&adt_def.variants().iter().map(|s| s.name).collect::<Vec<_>>(),
|
||||
item_name.name,
|
||||
item_ident.name,
|
||||
None,
|
||||
) && let Some(variant) = adt_def.variants().iter().find(|s| s.name == var_name)
|
||||
{
|
||||
|
@ -1737,14 +1737,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
if !find_candidate_for_method {
|
||||
self.lookup_segments_chain_for_no_match_method(
|
||||
&mut err,
|
||||
item_name,
|
||||
item_ident,
|
||||
item_kind,
|
||||
source,
|
||||
no_match_data,
|
||||
);
|
||||
}
|
||||
|
||||
self.note_derefed_ty_has_method(&mut err, source, rcvr_ty, item_name, expected);
|
||||
self.note_derefed_ty_has_method(&mut err, source, rcvr_ty, item_ident, expected);
|
||||
err.emit()
|
||||
}
|
||||
|
||||
|
|
|
@@ -163,9 +163,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
enum AdjustMode {
/// Peel off all immediate reference types.
Peel,
/// Reset binding mode to the initial mode.
/// Used for destructuring assignment, where we don't want any match ergonomics.
Reset,
/// Pass on the input binding mode and expected type.
Pass,
}
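With `Reset` gone, the adjust-mode decision above comes down to peeling references or passing the expected type through unchanged. A tiny illustrative model (strings stand in for `Ty<'tcx>`; this is not the compiler's logic):

```rust
// Toy model of the two remaining modes: `Peel` strips leading references from
// the expected type, `Pass` leaves it untouched.
enum AdjustMode {
    Peel,
    Pass,
}

fn adjust(expected: &str, mode: AdjustMode) -> &str {
    match mode {
        AdjustMode::Pass => expected,
        AdjustMode::Peel => expected
            .trim_start_matches("&mut ")
            .trim_start_matches('&'),
    }
}

fn main() {
    assert_eq!(adjust("&&Option<i32>", AdjustMode::Peel), "Option<i32>");
    assert_eq!(adjust("&str", AdjustMode::Pass), "&str");
}
```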
@ -321,77 +318,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
/// Conversely, inside this module, `check_pat_top` should never be used.
|
||||
#[instrument(level = "debug", skip(self, pat_info))]
|
||||
fn check_pat(&self, pat: &'tcx Pat<'tcx>, expected: Ty<'tcx>, pat_info: PatInfo<'tcx>) {
|
||||
let PatInfo { binding_mode, max_ref_mutbl, top_info: ti, current_depth, .. } = pat_info;
|
||||
|
||||
let path_res = match pat.kind {
|
||||
let opt_path_res = match pat.kind {
|
||||
PatKind::Expr(PatExpr { kind: PatExprKind::Path(qpath), hir_id, span }) => {
|
||||
Some(self.resolve_ty_and_res_fully_qualified_call(qpath, *hir_id, *span))
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
let adjust_mode = self.calc_adjust_mode(pat, path_res.map(|(res, ..)| res));
|
||||
let (expected, binding_mode, max_ref_mutbl) =
|
||||
self.calc_default_binding_mode(pat, expected, binding_mode, adjust_mode, max_ref_mutbl);
|
||||
let pat_info = PatInfo {
|
||||
binding_mode,
|
||||
max_ref_mutbl,
|
||||
top_info: ti,
|
||||
decl_origin: pat_info.decl_origin,
|
||||
current_depth: current_depth + 1,
|
||||
};
|
||||
|
||||
let ty = match pat.kind {
|
||||
PatKind::Missing | PatKind::Wild | PatKind::Err(_) => expected,
|
||||
// We allow any type here; we ensure that the type is uninhabited during match checking.
|
||||
PatKind::Never => expected,
|
||||
PatKind::Expr(PatExpr { kind: PatExprKind::Path(qpath), hir_id, span }) => {
|
||||
let ty = self.check_pat_path(
|
||||
*hir_id,
|
||||
pat.hir_id,
|
||||
*span,
|
||||
qpath,
|
||||
path_res.unwrap(),
|
||||
expected,
|
||||
&pat_info.top_info,
|
||||
);
|
||||
self.write_ty(*hir_id, ty);
|
||||
ty
|
||||
}
|
||||
PatKind::Expr(lt) => self.check_pat_lit(pat.span, lt, expected, &pat_info.top_info),
|
||||
PatKind::Range(lhs, rhs, _) => {
|
||||
self.check_pat_range(pat.span, lhs, rhs, expected, &pat_info.top_info)
|
||||
}
|
||||
PatKind::Binding(ba, var_id, ident, sub) => {
|
||||
self.check_pat_ident(pat, ba, var_id, ident, sub, expected, pat_info)
|
||||
}
|
||||
PatKind::TupleStruct(ref qpath, subpats, ddpos) => {
|
||||
self.check_pat_tuple_struct(pat, qpath, subpats, ddpos, expected, pat_info)
|
||||
}
|
||||
PatKind::Struct(ref qpath, fields, has_rest_pat) => {
|
||||
self.check_pat_struct(pat, qpath, fields, has_rest_pat, expected, pat_info)
|
||||
}
|
||||
PatKind::Guard(pat, cond) => {
|
||||
self.check_pat(pat, expected, pat_info);
|
||||
self.check_expr_has_type_or_error(cond, self.tcx.types.bool, |_| {});
|
||||
expected
|
||||
}
|
||||
PatKind::Or(pats) => {
|
||||
for pat in pats {
|
||||
self.check_pat(pat, expected, pat_info);
|
||||
}
|
||||
expected
|
||||
}
|
||||
PatKind::Tuple(elements, ddpos) => {
|
||||
self.check_pat_tuple(pat.span, elements, ddpos, expected, pat_info)
|
||||
}
|
||||
PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, pat_info),
|
||||
PatKind::Deref(inner) => self.check_pat_deref(pat.span, inner, expected, pat_info),
|
||||
PatKind::Ref(inner, mutbl) => self.check_pat_ref(pat, inner, mutbl, expected, pat_info),
|
||||
PatKind::Slice(before, slice, after) => {
|
||||
self.check_pat_slice(pat.span, before, slice, after, expected, pat_info)
|
||||
}
|
||||
};
|
||||
|
||||
let adjust_mode = self.calc_adjust_mode(pat, opt_path_res.map(|(res, ..)| res));
|
||||
let ty = self.check_pat_inner(pat, opt_path_res, adjust_mode, expected, pat_info);
|
||||
self.write_ty(pat.hir_id, ty);
|
||||
|
||||
// (note_1): In most of the cases where (note_1) is referenced
|
||||
|
@ -437,27 +371,126 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
// `regions-relate-bound-regions-on-closures-to-inference-variables.rs`,
|
||||
}
|
||||
|
||||
/// Compute the new expected type and default binding mode from the old ones
|
||||
/// as well as the pattern form we are currently checking.
|
||||
fn calc_default_binding_mode(
|
||||
// Helper to avoid resolving the same path pattern several times.
|
||||
fn check_pat_inner(
|
||||
&self,
|
||||
pat: &'tcx Pat<'tcx>,
|
||||
expected: Ty<'tcx>,
|
||||
def_br: ByRef,
|
||||
opt_path_res: Option<(Res, Option<LoweredTy<'tcx>>, &'tcx [hir::PathSegment<'tcx>])>,
|
||||
adjust_mode: AdjustMode,
|
||||
max_ref_mutbl: MutblCap,
|
||||
) -> (Ty<'tcx>, ByRef, MutblCap) {
|
||||
expected: Ty<'tcx>,
|
||||
pat_info: PatInfo<'tcx>,
|
||||
) -> Ty<'tcx> {
|
||||
#[cfg(debug_assertions)]
|
||||
if def_br == ByRef::Yes(Mutability::Mut)
|
||||
&& max_ref_mutbl != MutblCap::Mut
|
||||
if pat_info.binding_mode == ByRef::Yes(Mutability::Mut)
|
||||
&& pat_info.max_ref_mutbl != MutblCap::Mut
|
||||
&& self.downgrade_mut_inside_shared()
|
||||
{
|
||||
span_bug!(pat.span, "Pattern mutability cap violated!");
|
||||
}
|
||||
match adjust_mode {
|
||||
AdjustMode::Pass => (expected, def_br, max_ref_mutbl),
|
||||
AdjustMode::Reset => (expected, ByRef::No, MutblCap::Mut),
|
||||
AdjustMode::Peel => self.peel_off_references(pat, expected, def_br, max_ref_mutbl),
|
||||
|
||||
// Resolve type if needed.
|
||||
let expected = if let AdjustMode::Peel = adjust_mode
|
||||
&& pat.default_binding_modes
|
||||
{
|
||||
self.try_structurally_resolve_type(pat.span, expected)
|
||||
} else {
|
||||
expected
|
||||
};
|
||||
let old_pat_info = pat_info;
|
||||
let pat_info = PatInfo { current_depth: old_pat_info.current_depth + 1, ..old_pat_info };
|
||||
|
||||
match pat.kind {
|
||||
// Peel off a `&` or `&mut` from the scrutinee type. See the examples in
|
||||
// `tests/ui/rfcs/rfc-2005-default-binding-mode`.
|
||||
_ if let AdjustMode::Peel = adjust_mode
|
||||
&& pat.default_binding_modes
|
||||
&& let ty::Ref(_, inner_ty, inner_mutability) = *expected.kind() =>
|
||||
{
|
||||
debug!("inspecting {:?}", expected);
|
||||
|
||||
debug!("current discriminant is Ref, inserting implicit deref");
|
||||
// Preserve the reference type. We'll need it later during THIR lowering.
|
||||
self.typeck_results
|
||||
.borrow_mut()
|
||||
.pat_adjustments_mut()
|
||||
.entry(pat.hir_id)
|
||||
.or_default()
|
||||
.push(expected);
|
||||
|
||||
let mut binding_mode = ByRef::Yes(match pat_info.binding_mode {
|
||||
// If default binding mode is by value, make it `ref` or `ref mut`
|
||||
// (depending on whether we observe `&` or `&mut`).
|
||||
ByRef::No |
|
||||
// When `ref mut`, stay a `ref mut` (on `&mut`) or downgrade to `ref` (on `&`).
|
||||
ByRef::Yes(Mutability::Mut) => inner_mutability,
|
||||
// Once a `ref`, always a `ref`.
|
||||
// This is because a `& &mut` cannot mutate the underlying value.
|
||||
ByRef::Yes(Mutability::Not) => Mutability::Not,
|
||||
});
|
||||
|
||||
let mut max_ref_mutbl = pat_info.max_ref_mutbl;
|
||||
if self.downgrade_mut_inside_shared() {
|
||||
binding_mode = binding_mode.cap_ref_mutability(max_ref_mutbl.as_mutbl());
|
||||
}
|
||||
if binding_mode == ByRef::Yes(Mutability::Not) {
|
||||
max_ref_mutbl = MutblCap::Not;
|
||||
}
|
||||
debug!("default binding mode is now {:?}", binding_mode);
|
||||
|
||||
// Use the old pat info to keep `current_depth` to its old value.
|
||||
let new_pat_info = PatInfo { binding_mode, max_ref_mutbl, ..old_pat_info };
|
||||
// Recurse with the new expected type.
|
||||
self.check_pat_inner(pat, opt_path_res, adjust_mode, inner_ty, new_pat_info)
|
||||
}
|
||||
PatKind::Missing | PatKind::Wild | PatKind::Err(_) => expected,
|
||||
// We allow any type here; we ensure that the type is uninhabited during match checking.
|
||||
PatKind::Never => expected,
|
||||
PatKind::Expr(PatExpr { kind: PatExprKind::Path(qpath), hir_id, span }) => {
|
||||
let ty = self.check_pat_path(
|
||||
*hir_id,
|
||||
pat.hir_id,
|
||||
*span,
|
||||
qpath,
|
||||
opt_path_res.unwrap(),
|
||||
expected,
|
||||
&pat_info.top_info,
|
||||
);
|
||||
self.write_ty(*hir_id, ty);
|
||||
ty
|
||||
}
|
||||
PatKind::Expr(lt) => self.check_pat_lit(pat.span, lt, expected, &pat_info.top_info),
|
||||
PatKind::Range(lhs, rhs, _) => {
|
||||
self.check_pat_range(pat.span, lhs, rhs, expected, &pat_info.top_info)
|
||||
}
|
||||
PatKind::Binding(ba, var_id, ident, sub) => {
|
||||
self.check_pat_ident(pat, ba, var_id, ident, sub, expected, pat_info)
|
||||
}
|
||||
PatKind::TupleStruct(ref qpath, subpats, ddpos) => {
|
||||
self.check_pat_tuple_struct(pat, qpath, subpats, ddpos, expected, pat_info)
|
||||
}
|
||||
PatKind::Struct(ref qpath, fields, has_rest_pat) => {
|
||||
self.check_pat_struct(pat, qpath, fields, has_rest_pat, expected, pat_info)
|
||||
}
|
||||
PatKind::Guard(pat, cond) => {
|
||||
self.check_pat(pat, expected, pat_info);
|
||||
self.check_expr_has_type_or_error(cond, self.tcx.types.bool, |_| {});
|
||||
expected
|
||||
}
|
||||
PatKind::Or(pats) => {
|
||||
for pat in pats {
|
||||
self.check_pat(pat, expected, pat_info);
|
||||
}
|
||||
expected
|
||||
}
|
||||
PatKind::Tuple(elements, ddpos) => {
|
||||
self.check_pat_tuple(pat.span, elements, ddpos, expected, pat_info)
|
||||
}
|
||||
PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, pat_info),
|
||||
PatKind::Deref(inner) => self.check_pat_deref(pat.span, inner, expected, pat_info),
|
||||
PatKind::Ref(inner, mutbl) => self.check_pat_ref(pat, inner, mutbl, expected, pat_info),
|
||||
PatKind::Slice(before, slice, after) => {
|
||||
self.check_pat_slice(pat.span, before, slice, after, expected, pat_info)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -465,11 +498,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
///
|
||||
/// When the pattern is a path pattern, `opt_path_res` must be `Some(res)`.
|
||||
fn calc_adjust_mode(&self, pat: &'tcx Pat<'tcx>, opt_path_res: Option<Res>) -> AdjustMode {
|
||||
// When we perform destructuring assignment, we disable default match bindings, which are
|
||||
// unintuitive in this context.
|
||||
if !pat.default_binding_modes {
|
||||
return AdjustMode::Reset;
|
||||
}
|
||||
match &pat.kind {
|
||||
// Type checking these product-like types successfully always require
|
||||
// that the expected type be of those types and not reference types.
|
||||
|
@ -526,64 +554,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
|||
}
|
||||
}
|
||||
|
||||
/// Peel off as many immediately nested `& mut?` from the expected type as possible
|
||||
/// and return the new expected type and binding default binding mode.
|
||||
/// The adjustments vector, if non-empty is stored in a table.
|
||||
fn peel_off_references(
|
||||
&self,
|
||||
pat: &'tcx Pat<'tcx>,
|
||||
expected: Ty<'tcx>,
|
||||
mut def_br: ByRef,
|
||||
mut max_ref_mutbl: MutblCap,
|
||||
) -> (Ty<'tcx>, ByRef, MutblCap) {
|
||||
let mut expected = self.try_structurally_resolve_type(pat.span, expected);
|
||||
// Peel off as many `&` or `&mut` from the scrutinee type as possible. For example,
|
||||
// for `match &&&mut Some(5)` the loop runs three times, aborting when it reaches
|
||||
// the `Some(5)` which is not of type Ref.
|
||||
//
|
||||
// For each ampersand peeled off, update the binding mode and push the original
|
||||
// type into the adjustments vector.
|
||||
//
|
||||
// See the examples in `ui/match-defbm*.rs`.
|
||||
let mut pat_adjustments = vec![];
|
||||
while let ty::Ref(_, inner_ty, inner_mutability) = *expected.kind() {
|
||||
debug!("inspecting {:?}", expected);
|
||||
|
||||
debug!("current discriminant is Ref, inserting implicit deref");
|
||||
// Preserve the reference type. We'll need it later during THIR lowering.
|
||||
pat_adjustments.push(expected);
|
||||
|
||||
expected = self.try_structurally_resolve_type(pat.span, inner_ty);
|
||||
def_br = ByRef::Yes(match def_br {
|
||||
// If default binding mode is by value, make it `ref` or `ref mut`
|
||||
// (depending on whether we observe `&` or `&mut`).
|
||||
ByRef::No |
|
||||
// When `ref mut`, stay a `ref mut` (on `&mut`) or downgrade to `ref` (on `&`).
|
||||
ByRef::Yes(Mutability::Mut) => inner_mutability,
|
||||
// Once a `ref`, always a `ref`.
|
||||
// This is because a `& &mut` cannot mutate the underlying value.
|
||||
ByRef::Yes(Mutability::Not) => Mutability::Not,
|
||||
});
|
||||
}
|
||||
|
||||
if self.downgrade_mut_inside_shared() {
|
||||
def_br = def_br.cap_ref_mutability(max_ref_mutbl.as_mutbl());
|
||||
}
|
||||
if def_br == ByRef::Yes(Mutability::Not) {
|
||||
max_ref_mutbl = MutblCap::Not;
|
||||
}
|
||||
|
||||
if !pat_adjustments.is_empty() {
|
||||
debug!("default binding mode is now {:?}", def_br);
|
||||
self.typeck_results
|
||||
.borrow_mut()
|
||||
.pat_adjustments_mut()
|
||||
.insert(pat.hir_id, pat_adjustments);
|
||||
}
|
||||
|
||||
(expected, def_br, max_ref_mutbl)
|
||||
}
|
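For readers unfamiliar with the default-binding-mode machinery this implements, the observable behaviour is ordinary match ergonomics. A minimal standalone sketch in plain Rust (independent of the compiler's internal types; the bindings are illustrative only):

    fn main() {
        // The scrutinee has type `&&Option<i32>`; the pattern mentions no `&` at all.
        let x: &&Option<i32> = &&Some(5);
        match x {
            // Both references are peeled implicitly; the default binding mode
            // becomes `ref`, so `n` is bound as `&i32` rather than `i32`.
            Some(n) => {
                let n: &i32 = n; // annotation only to make the binding mode visible
                assert_eq!(*n, 5);
            }
            None => unreachable!(),
        }
    }

Each implicitly peeled `&` is what gets recorded in the `pat_adjustments` table above.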
||||
|
||||
fn check_pat_expr_unadjusted(&self, lt: &'tcx hir::PatExpr<'tcx>) -> Ty<'tcx> {
|
||||
let ty = match &lt.kind {
|
||||
rustc_hir::PatExprKind::Lit { lit, negated } => {
|
||||
|
|
|
@@ -5,7 +5,6 @@ edition = "2024"

[dependencies]
# tidy-alphabetical-start
rustc-rayon = { version = "0.5.0" }
rustc-rayon-core = { version = "0.5.0" }
rustc_ast = { path = "../rustc_ast" }
rustc_ast_lowering = { path = "../rustc_ast_lowering" }
@@ -348,6 +348,10 @@ pub struct Config {
/// the list of queries.
pub override_queries: Option<fn(&Session, &mut Providers)>,

/// An extra set of symbols to add to the symbol interner, the symbol indices
/// will start at [`PREDEFINED_SYMBOLS_COUNT`](rustc_span::symbol::PREDEFINED_SYMBOLS_COUNT)
pub extra_symbols: Vec<&'static str>,

/// This is a callback from the driver that is called to create a codegen backend.
///
/// Has no uses within this repository, but is used by bjorn3 for "the
@ -409,6 +413,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
|
|||
&early_dcx,
|
||||
config.opts.edition,
|
||||
config.opts.unstable_opts.threads,
|
||||
&config.extra_symbols,
|
||||
SourceMapInputs { file_loader, path_mapping, hash_kind, checksum_hash_kind },
|
||||
|current_gcx| {
|
||||
// The previous `early_dcx` can't be reused here because it doesn't
|
||||
|
|
|
@ -800,6 +800,7 @@ pub fn create_and_enter_global_ctxt<T, F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> T>(
|
|||
sess.opts.cg.metadata.clone(),
|
||||
sess.cfg_version,
|
||||
);
|
||||
|
||||
let outputs = util::build_output_filenames(&pre_configured_attrs, sess);
|
||||
|
||||
let dep_type = DepsType { dep_names: rustc_query_impl::dep_kind_names() };
|
||||
|
|
|
@ -53,7 +53,7 @@ where
|
|||
checksum_hash_kind,
|
||||
});
|
||||
|
||||
rustc_span::create_session_globals_then(DEFAULT_EDITION, sm_inputs, || {
|
||||
rustc_span::create_session_globals_then(DEFAULT_EDITION, &[], sm_inputs, || {
|
||||
let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
|
||||
let io = CompilerIO {
|
||||
input: Input::Str { name: FileName::Custom(String::new()), input: String::new() },
|
||||
|
|
|
@ -117,6 +117,7 @@ fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
|
|||
thread_stack_size: usize,
|
||||
edition: Edition,
|
||||
sm_inputs: SourceMapInputs,
|
||||
extra_symbols: &[&'static str],
|
||||
f: F,
|
||||
) -> R {
|
||||
// The "thread pool" is a single spawned thread in the non-parallel
|
||||
|
@ -134,9 +135,12 @@ fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
|
|||
// name contains null bytes.
|
||||
let r = builder
|
||||
.spawn_scoped(s, move || {
|
||||
rustc_span::create_session_globals_then(edition, Some(sm_inputs), || {
|
||||
f(CurrentGcx::new())
|
||||
})
|
||||
rustc_span::create_session_globals_then(
|
||||
edition,
|
||||
extra_symbols,
|
||||
Some(sm_inputs),
|
||||
|| f(CurrentGcx::new()),
|
||||
)
|
||||
})
|
||||
.unwrap()
|
||||
.join();
|
||||
|
@ -152,6 +156,7 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
|
|||
thread_builder_diag: &EarlyDiagCtxt,
|
||||
edition: Edition,
|
||||
threads: usize,
|
||||
extra_symbols: &[&'static str],
|
||||
sm_inputs: SourceMapInputs,
|
||||
f: F,
|
||||
) -> R {
|
||||
|
@ -168,18 +173,24 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
|
|||
let registry = sync::Registry::new(std::num::NonZero::new(threads).unwrap());
|
||||
|
||||
if !sync::is_dyn_thread_safe() {
|
||||
return run_in_thread_with_globals(thread_stack_size, edition, sm_inputs, |current_gcx| {
|
||||
// Register the thread for use with the `WorkerLocal` type.
|
||||
registry.register();
|
||||
return run_in_thread_with_globals(
|
||||
thread_stack_size,
|
||||
edition,
|
||||
sm_inputs,
|
||||
extra_symbols,
|
||||
|current_gcx| {
|
||||
// Register the thread for use with the `WorkerLocal` type.
|
||||
registry.register();
|
||||
|
||||
f(current_gcx)
|
||||
});
|
||||
f(current_gcx)
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
let current_gcx = FromDyn::from(CurrentGcx::new());
|
||||
let current_gcx2 = current_gcx.clone();
|
||||
|
||||
let builder = rayon::ThreadPoolBuilder::new()
|
||||
let builder = rayon_core::ThreadPoolBuilder::new()
|
||||
.thread_name(|_| "rustc".to_string())
|
||||
.acquire_thread_handler(jobserver::acquire_thread)
|
||||
.release_thread_handler(jobserver::release_thread)
|
||||
|
@ -230,13 +241,13 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
|
|||
// pool. Upon creation, each worker thread created gets a copy of the
|
||||
// session globals in TLS. This is possible because `SessionGlobals` impls
|
||||
// `Send` in the parallel compiler.
|
||||
rustc_span::create_session_globals_then(edition, Some(sm_inputs), || {
|
||||
rustc_span::create_session_globals_then(edition, extra_symbols, Some(sm_inputs), || {
|
||||
rustc_span::with_session_globals(|session_globals| {
|
||||
let session_globals = FromDyn::from(session_globals);
|
||||
builder
|
||||
.build_scoped(
|
||||
// Initialize each new worker thread when created.
|
||||
move |thread: rayon::ThreadBuilder| {
|
||||
move |thread: rayon_core::ThreadBuilder| {
|
||||
// Register the thread for use with the `WorkerLocal` type.
|
||||
registry.register();
|
||||
|
||||
|
@ -245,7 +256,9 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send,
|
|||
})
|
||||
},
|
||||
// Run `f` on the first thread in the thread pool.
|
||||
move |pool: &rayon::ThreadPool| pool.install(|| f(current_gcx.into_inner())),
|
||||
move |pool: &rayon_core::ThreadPool| {
|
||||
pool.install(|| f(current_gcx.into_inner()))
|
||||
},
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
|
|
|
@ -859,7 +859,7 @@ impl<'tcx> LateContext<'tcx> {
|
|||
) -> Option<Ty<'tcx>> {
|
||||
let tcx = self.tcx;
|
||||
tcx.associated_items(trait_id)
|
||||
.find_by_name_and_kind(tcx, Ident::from_str(name), ty::AssocKind::Type, trait_id)
|
||||
.find_by_ident_and_kind(tcx, Ident::from_str(name), ty::AssocKind::Type, trait_id)
|
||||
.and_then(|assoc| {
|
||||
let proj = Ty::new_projection(tcx, assoc.def_id, [self_ty]);
|
||||
tcx.try_normalize_erasing_regions(self.typing_env(), proj).ok()
|
||||
|
|
|
@ -423,11 +423,11 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase {
|
|||
}
|
||||
|
||||
fn check_trait_item(&mut self, cx: &LateContext<'_>, item: &hir::TraitItem<'_>) {
|
||||
if let hir::TraitItemKind::Fn(_, hir::TraitFn::Required(pnames)) = item.kind {
|
||||
if let hir::TraitItemKind::Fn(_, hir::TraitFn::Required(param_idents)) = item.kind {
|
||||
self.check_snake_case(cx, "trait method", &item.ident);
|
||||
for param_name in pnames {
|
||||
if let Some(param_name) = param_name {
|
||||
self.check_snake_case(cx, "variable", param_name);
|
||||
for param_ident in param_idents {
|
||||
if let Some(param_ident) = param_ident {
|
||||
self.check_snake_case(cx, "variable", param_ident);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -142,13 +142,13 @@ pub(super) fn symbols(input: TokenStream) -> TokenStream {
|
|||
output
|
||||
}
|
||||
|
||||
struct Preinterned {
|
||||
struct Predefined {
|
||||
idx: u32,
|
||||
span_of_name: Span,
|
||||
}
|
||||
|
||||
struct Entries {
|
||||
map: HashMap<String, Preinterned>,
|
||||
map: HashMap<String, Predefined>,
|
||||
}
|
||||
|
||||
impl Entries {
|
||||
|
@ -163,7 +163,7 @@ impl Entries {
|
|||
prev.idx
|
||||
} else {
|
||||
let idx = self.len();
|
||||
self.map.insert(s.to_string(), Preinterned { idx, span_of_name: span });
|
||||
self.map.insert(s.to_string(), Predefined { idx, span_of_name: span });
|
||||
idx
|
||||
}
|
||||
}
|
||||
|
@@ -295,10 +295,14 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
}

let symbol_digits_base = entries.map["0"].idx;
let preinterned_symbols_count = entries.len();
let predefined_symbols_count = entries.len();
let output = quote! {
    const SYMBOL_DIGITS_BASE: u32 = #symbol_digits_base;
    const PREINTERNED_SYMBOLS_COUNT: u32 = #preinterned_symbols_count;

    /// The number of predefined symbols; this is the first index for
    /// extra pre-interned symbols in an Interner created via
    /// [`Interner::with_extra_symbols`].
    pub const PREDEFINED_SYMBOLS_COUNT: u32 = #predefined_symbols_count;

    #[doc(hidden)]
    #[allow(non_upper_case_globals)]
@@ -315,10 +319,13 @@ fn symbols_with_errors(input: TokenStream) -> (TokenStream, Vec<syn::Error>) {
}

impl Interner {
    pub(crate) fn fresh() -> Self {
        Interner::prefill(&[
            #prefill_stream
        ])
    /// Creates an `Interner` with the predefined symbols from the `symbols!` macro and
    /// any extra symbols provided by external drivers such as Clippy
    pub(crate) fn with_extra_symbols(extra_symbols: &[&'static str]) -> Self {
        Interner::prefill(
            &[#prefill_stream],
            extra_symbols,
        )
    }
}
};
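A rough standalone model of the indexing contract may help: predefined symbols fill indices `0..PREDEFINED_SYMBOLS_COUNT`, and any extra symbols a driver passes are interned right after them. This is a toy sketch in plain Rust, not the real `rustc_span` interner; `ToyInterner` and its fields are invented for the illustration.

    use std::collections::HashMap;

    struct ToyInterner {
        strings: Vec<&'static str>,
        names: HashMap<&'static str, u32>,
    }

    impl ToyInterner {
        fn with_extra_symbols(predefined: &[&'static str], extra: &[&'static str]) -> Self {
            let mut it = ToyInterner { strings: Vec::new(), names: HashMap::new() };
            // Predefined symbols first, then the driver-supplied extras.
            for &s in predefined.iter().chain(extra) {
                if !it.names.contains_key(s) {
                    let idx = it.strings.len() as u32;
                    it.strings.push(s);
                    it.names.insert(s, idx);
                }
            }
            it
        }
    }

    fn main() {
        let predefined = ["as", "fn", "let"];
        let interner = ToyInterner::with_extra_symbols(&predefined, &["clippy_extra"]);
        // The first extra symbol lands right after the predefined block.
        assert_eq!(interner.names["clippy_extra"], predefined.len() as u32);
    }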
@ -562,9 +562,9 @@ impl<'a, 'tcx> SpanDecoder for DecodeContext<'a, 'tcx> {
|
|||
Symbol::intern(s)
|
||||
})
|
||||
}
|
||||
SYMBOL_PREINTERNED => {
|
||||
SYMBOL_PREDEFINED => {
|
||||
let symbol_index = self.read_u32();
|
||||
Symbol::new_from_decoded(symbol_index)
|
||||
Symbol::new(symbol_index)
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
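The encode and decode arms mirror each other: a predefined symbol travels as a tagged `u32` index, anything else as the string itself. A compact standalone model of that scheme (plain Rust with invented names, not the real `rustc_metadata` encoding):

    #[derive(Debug, PartialEq)]
    enum EncodedSymbol {
        Str(String),     // SYMBOL_STR: pay for the full string
        Predefined(u32), // SYMBOL_PREDEFINED: just the interner index
    }

    fn encode(sym: &str, predefined: &[&str]) -> EncodedSymbol {
        match predefined.iter().position(|&p| p == sym) {
            Some(idx) => EncodedSymbol::Predefined(idx as u32),
            None => EncodedSymbol::Str(sym.to_string()),
        }
    }

    fn main() {
        let predefined = ["as", "fn", "let"];
        assert_eq!(encode("fn", &predefined), EncodedSymbol::Predefined(1));
        assert_eq!(encode("my_local", &predefined), EncodedSymbol::Str("my_local".into()));
    }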
|
@ -1313,7 +1313,7 @@ impl<'a> CrateMetadataRef<'a> {
|
|||
fn get_fn_has_self_parameter(self, id: DefIndex, sess: &'a Session) -> bool {
|
||||
self.root
|
||||
.tables
|
||||
.fn_arg_names
|
||||
.fn_arg_idents
|
||||
.get(self, id)
|
||||
.expect("argument names not encoded for a function")
|
||||
.decode((self, sess))
|
||||
|
|
|
@ -286,7 +286,7 @@ provide! { tcx, def_id, other, cdata,
|
|||
rendered_const => { table }
|
||||
rendered_precise_capturing_args => { table }
|
||||
asyncness => { table_direct }
|
||||
fn_arg_names => { table }
|
||||
fn_arg_idents => { table }
|
||||
coroutine_kind => { table_direct }
|
||||
coroutine_for_closure => { table }
|
||||
coroutine_by_move_body_def_id => { table }
|
||||
|
|
|
@ -201,9 +201,9 @@ impl<'a, 'tcx> SpanEncoder for EncodeContext<'a, 'tcx> {
|
|||
}
|
||||
|
||||
fn encode_symbol(&mut self, symbol: Symbol) {
|
||||
// if symbol preinterned, emit tag and symbol index
|
||||
if symbol.is_preinterned() {
|
||||
self.opaque.emit_u8(SYMBOL_PREINTERNED);
|
||||
// if symbol predefined, emit tag and symbol index
|
||||
if symbol.is_predefined() {
|
||||
self.opaque.emit_u8(SYMBOL_PREDEFINED);
|
||||
self.opaque.emit_u32(symbol.as_u32());
|
||||
} else {
|
||||
// otherwise write it as string or as offset to it
|
||||
|
@ -1469,7 +1469,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
|
|||
}
|
||||
if let DefKind::Fn | DefKind::AssocFn = def_kind {
|
||||
self.tables.asyncness.set_some(def_id.index, tcx.asyncness(def_id));
|
||||
record_array!(self.tables.fn_arg_names[def_id] <- tcx.fn_arg_names(def_id));
|
||||
record_array!(self.tables.fn_arg_idents[def_id] <- tcx.fn_arg_idents(def_id));
|
||||
}
|
||||
if let Some(name) = tcx.intrinsic(def_id) {
|
||||
record!(self.tables.intrinsic[def_id] <- name);
|
||||
|
@ -2199,7 +2199,7 @@ fn prefetch_mir(tcx: TyCtxt<'_>) {
|
|||
}
|
||||
|
||||
let reachable_set = tcx.reachable_set(());
|
||||
par_for_each_in(tcx.mir_keys(()), |&def_id| {
|
||||
par_for_each_in(tcx.mir_keys(()), |&&def_id| {
|
||||
let (encode_const, encode_opt) = should_encode_mir(tcx, reachable_set, def_id);
|
||||
|
||||
if encode_const {
|
||||
|
|
|
@ -451,7 +451,7 @@ define_tables! {
|
|||
rendered_const: Table<DefIndex, LazyValue<String>>,
|
||||
rendered_precise_capturing_args: Table<DefIndex, LazyArray<PreciseCapturingArgKind<Symbol, Symbol>>>,
|
||||
asyncness: Table<DefIndex, ty::Asyncness>,
|
||||
fn_arg_names: Table<DefIndex, LazyArray<Option<Ident>>>,
|
||||
fn_arg_idents: Table<DefIndex, LazyArray<Option<Ident>>>,
|
||||
coroutine_kind: Table<DefIndex, hir::CoroutineKind>,
|
||||
coroutine_for_closure: Table<DefIndex, RawDefId>,
|
||||
adt_destructor: Table<DefIndex, LazyValue<ty::Destructor>>,
|
||||
|
@ -586,7 +586,7 @@ impl SpanTag {
|
|||
// Tags for encoding Symbol's
|
||||
const SYMBOL_STR: u8 = 0;
|
||||
const SYMBOL_OFFSET: u8 = 1;
|
||||
const SYMBOL_PREINTERNED: u8 = 2;
|
||||
const SYMBOL_PREDEFINED: u8 = 2;
|
||||
|
||||
pub fn provide(providers: &mut Providers) {
|
||||
encoder::provide(providers);
|
||||
|
|
|
@ -281,7 +281,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn hir_body_param_names(self, id: BodyId) -> impl Iterator<Item = Option<Ident>> {
|
||||
pub fn hir_body_param_idents(self, id: BodyId) -> impl Iterator<Item = Option<Ident>> {
|
||||
self.hir_body(id).params.iter().map(|param| match param.pat.kind {
|
||||
PatKind::Binding(_, _, ident, _) => Some(ident),
|
||||
PatKind::Wild => Some(Ident::new(kw::Underscore, param.pat.span)),
|
||||
|
@ -343,7 +343,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
|
||||
#[inline]
|
||||
pub fn par_hir_body_owners(self, f: impl Fn(LocalDefId) + DynSend + DynSync) {
|
||||
par_for_each_in(&self.hir_crate_items(()).body_owners[..], |&def_id| f(def_id));
|
||||
par_for_each_in(&self.hir_crate_items(()).body_owners[..], |&&def_id| f(def_id));
|
||||
}
|
||||
|
||||
pub fn hir_ty_param_owner(self, def_id: LocalDefId) -> LocalDefId {
|
||||
|
|
|
@ -83,35 +83,35 @@ impl ModuleItems {
|
|||
&self,
|
||||
f: impl Fn(ItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
try_par_for_each_in(&self.free_items[..], |&id| f(id))
|
||||
try_par_for_each_in(&self.free_items[..], |&&id| f(id))
|
||||
}
|
||||
|
||||
pub fn par_trait_items(
|
||||
&self,
|
||||
f: impl Fn(TraitItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
try_par_for_each_in(&self.trait_items[..], |&id| f(id))
|
||||
try_par_for_each_in(&self.trait_items[..], |&&id| f(id))
|
||||
}
|
||||
|
||||
pub fn par_impl_items(
|
||||
&self,
|
||||
f: impl Fn(ImplItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
try_par_for_each_in(&self.impl_items[..], |&id| f(id))
|
||||
try_par_for_each_in(&self.impl_items[..], |&&id| f(id))
|
||||
}
|
||||
|
||||
pub fn par_foreign_items(
|
||||
&self,
|
||||
f: impl Fn(ForeignItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
try_par_for_each_in(&self.foreign_items[..], |&id| f(id))
|
||||
try_par_for_each_in(&self.foreign_items[..], |&&id| f(id))
|
||||
}
|
||||
|
||||
pub fn par_opaques(
|
||||
&self,
|
||||
f: impl Fn(LocalDefId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
try_par_for_each_in(&self.opaques[..], |&id| f(id))
|
||||
try_par_for_each_in(&self.opaques[..], |&&id| f(id))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -215,9 +215,9 @@ pub fn provide(providers: &mut Providers) {
|
|||
let hir_id = tcx.local_def_id_to_hir_id(def_id);
|
||||
tcx.hir_opt_ident_span(hir_id)
|
||||
};
|
||||
providers.fn_arg_names = |tcx, def_id| {
|
||||
providers.fn_arg_idents = |tcx, def_id| {
|
||||
if let Some(body_id) = tcx.hir_node_by_def_id(def_id).body_id() {
|
||||
tcx.arena.alloc_from_iter(tcx.hir_body_param_names(body_id))
|
||||
tcx.arena.alloc_from_iter(tcx.hir_body_param_idents(body_id))
|
||||
} else if let Node::TraitItem(&TraitItem {
|
||||
kind: TraitItemKind::Fn(_, TraitFn::Required(idents)),
|
||||
..
|
||||
|
@ -231,7 +231,7 @@ pub fn provide(providers: &mut Providers) {
|
|||
} else {
|
||||
span_bug!(
|
||||
tcx.hir_span(tcx.local_def_id_to_hir_id(def_id)),
|
||||
"fn_arg_names: unexpected item {:?}",
|
||||
"fn_arg_idents: unexpected item {:?}",
|
||||
def_id
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1442,8 +1442,8 @@ rustc_queries! {
|
|||
desc { |tcx| "computing target features for inline asm of `{}`", tcx.def_path_str(def_id) }
|
||||
}
|
||||
|
||||
query fn_arg_names(def_id: DefId) -> &'tcx [Option<rustc_span::Ident>] {
|
||||
desc { |tcx| "looking up function parameter names for `{}`", tcx.def_path_str(def_id) }
|
||||
query fn_arg_idents(def_id: DefId) -> &'tcx [Option<rustc_span::Ident>] {
|
||||
desc { |tcx| "looking up function parameter identifiers for `{}`", tcx.def_path_str(def_id) }
|
||||
separate_provide_extern
|
||||
}
|
||||
|
||||
|
@ -1900,6 +1900,11 @@ rustc_queries! {
|
|||
|
||||
// The macro which defines `rustc_metadata::provide_extern` depends on this query's name.
|
||||
// Changing the name should cause a compiler error, but in case that changes, be aware.
|
||||
//
|
||||
// The hash should not be calculated before the `analysis` pass is complete, specifically
|
||||
// until `tcx.untracked().definitions.freeze()` has been called, otherwise if incremental
|
||||
// compilation is enabled calculating this hash can freeze this structure too early in
|
||||
// compilation and cause subsequent crashes when attempting to write to `definitions`
|
||||
query crate_hash(_: CrateNum) -> Svh {
|
||||
eval_always
|
||||
desc { "looking up the hash a crate" }
|
||||
|
|
|
@ -46,7 +46,7 @@ const TAG_EXPN_DATA: u8 = 1;
|
|||
// Tags for encoding Symbol's
|
||||
const SYMBOL_STR: u8 = 0;
|
||||
const SYMBOL_OFFSET: u8 = 1;
|
||||
const SYMBOL_PREINTERNED: u8 = 2;
|
||||
const SYMBOL_PREDEFINED: u8 = 2;
|
||||
|
||||
/// Provides an interface to incremental compilation data cached from the
|
||||
/// previous compilation session. This data will eventually include the results
|
||||
|
@ -674,9 +674,9 @@ impl<'a, 'tcx> SpanDecoder for CacheDecoder<'a, 'tcx> {
|
|||
Symbol::intern(s)
|
||||
})
|
||||
}
|
||||
SYMBOL_PREINTERNED => {
|
||||
SYMBOL_PREDEFINED => {
|
||||
let symbol_index = self.read_u32();
|
||||
Symbol::new_from_decoded(symbol_index)
|
||||
Symbol::new(symbol_index)
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
@ -892,9 +892,9 @@ impl<'a, 'tcx> SpanEncoder for CacheEncoder<'a, 'tcx> {
|
|||
|
||||
// copy&paste impl from rustc_metadata
|
||||
fn encode_symbol(&mut self, symbol: Symbol) {
|
||||
// if symbol preinterned, emit tag and symbol index
|
||||
if symbol.is_preinterned() {
|
||||
self.encoder.emit_u8(SYMBOL_PREINTERNED);
|
||||
// if symbol predefined, emit tag and symbol index
|
||||
if symbol.is_predefined() {
|
||||
self.encoder.emit_u8(SYMBOL_PREDEFINED);
|
||||
self.encoder.emit_u32(symbol.as_u32());
|
||||
} else {
|
||||
// otherwise write it as string or as offset to it
|
||||
|
|
|
@ -98,10 +98,10 @@ impl AssocItem {
|
|||
|
||||
pub fn descr(&self) -> &'static str {
|
||||
match self.kind {
|
||||
ty::AssocKind::Const => "const",
|
||||
ty::AssocKind::Const => "associated const",
|
||||
ty::AssocKind::Fn if self.fn_has_self_parameter => "method",
|
||||
ty::AssocKind::Fn => "associated function",
|
||||
ty::AssocKind::Type => "type",
|
||||
ty::AssocKind::Type => "associated type",
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -155,6 +155,8 @@ impl AssocKind {
|
|||
impl std::fmt::Display for AssocKind {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
// FIXME: fails to distinguish between "associated function" and
|
||||
// "method" because `has_self` isn't known here.
|
||||
AssocKind::Fn => write!(f, "method"),
|
||||
AssocKind::Const => write!(f, "associated const"),
|
||||
AssocKind::Type => write!(f, "associated type"),
|
||||
|
@ -199,8 +201,9 @@ impl AssocItems {
|
|||
self.items.get_by_key(name)
|
||||
}
|
||||
|
||||
/// Returns the associated item with the given name and `AssocKind`, if one exists.
|
||||
pub fn find_by_name_and_kind(
|
||||
/// Returns the associated item with the given identifier and `AssocKind`, if one exists.
|
||||
/// The identifier is matched hygienically.
|
||||
pub fn find_by_ident_and_kind(
|
||||
&self,
|
||||
tcx: TyCtxt<'_>,
|
||||
ident: Ident,
|
||||
|
@ -212,8 +215,9 @@ impl AssocItems {
|
|||
.find(|item| tcx.hygienic_eq(ident, item.ident(tcx), parent_def_id))
|
||||
}
|
||||
|
||||
/// Returns the associated item with the given name and any of `AssocKind`, if one exists.
|
||||
pub fn find_by_name_and_kinds(
|
||||
/// Returns the associated item with the given identifier and any of `AssocKind`, if one
|
||||
/// exists. The identifier is matched hygienically.
|
||||
pub fn find_by_ident_and_kinds(
|
||||
&self,
|
||||
tcx: TyCtxt<'_>,
|
||||
ident: Ident,
|
||||
|
@ -221,11 +225,12 @@ impl AssocItems {
|
|||
kinds: &[AssocKind],
|
||||
parent_def_id: DefId,
|
||||
) -> Option<&ty::AssocItem> {
|
||||
kinds.iter().find_map(|kind| self.find_by_name_and_kind(tcx, ident, *kind, parent_def_id))
|
||||
kinds.iter().find_map(|kind| self.find_by_ident_and_kind(tcx, ident, *kind, parent_def_id))
|
||||
}
|
||||
|
||||
/// Returns the associated item with the given name in the given `Namespace`, if one exists.
|
||||
pub fn find_by_name_and_namespace(
|
||||
/// Returns the associated item with the given identifier in the given `Namespace`, if one
|
||||
/// exists. The identifier is matched hygienically.
|
||||
pub fn find_by_ident_and_namespace(
|
||||
&self,
|
||||
tcx: TyCtxt<'_>,
|
||||
ident: Ident,
|
||||
|
|
|
@ -279,7 +279,7 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
p.hash(&mut s);
|
||||
let hash = s.finish();
|
||||
*path = Some(path.take().unwrap_or_else(|| {
|
||||
self.output_filenames(()).temp_path_ext(&format!("long-type-{hash}.txt"), None)
|
||||
self.output_filenames(()).temp_path_for_diagnostic(&format!("long-type-{hash}.txt"))
|
||||
}));
|
||||
let Ok(mut file) =
|
||||
File::options().create(true).read(true).append(true).open(&path.as_ref().unwrap())
|
||||
|
|
|
@ -1939,15 +1939,15 @@ impl<'tcx> TyCtxt<'tcx> {
|
|||
/// Hygienically compares a use-site name (`use_name`) for a field or an associated item with
|
||||
/// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed
|
||||
/// definition's parent/scope to perform comparison.
|
||||
pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool {
|
||||
// We could use `Ident::eq` here, but we deliberately don't. The name
|
||||
pub fn hygienic_eq(self, use_ident: Ident, def_ident: Ident, def_parent_def_id: DefId) -> bool {
|
||||
// We could use `Ident::eq` here, but we deliberately don't. The identifier
|
||||
// comparison fails frequently, and we want to avoid the expensive
|
||||
// `normalize_to_macros_2_0()` calls required for the span comparison whenever possible.
|
||||
use_name.name == def_name.name
|
||||
&& use_name
|
||||
use_ident.name == def_ident.name
|
||||
&& use_ident
|
||||
.span
|
||||
.ctxt()
|
||||
.hygienic_eq(def_name.span.ctxt(), self.expn_that_defined(def_parent_def_id))
|
||||
.hygienic_eq(def_ident.span.ctxt(), self.expn_that_defined(def_parent_def_id))
|
||||
}
|
||||
|
||||
pub fn adjust_ident(self, mut ident: Ident, scope: DefId) -> Ident {
|
||||
|
|
|
@ -382,7 +382,7 @@ pub fn shrunk_instance_name<'tcx>(
|
|||
return (s, None);
|
||||
}
|
||||
|
||||
let path = tcx.output_filenames(()).temp_path_ext("long-type.txt", None);
|
||||
let path = tcx.output_filenames(()).temp_path_for_diagnostic("long-type.txt");
|
||||
let written_to_path = std::fs::write(&path, s).ok().map(|_| path);
|
||||
|
||||
(shrunk, written_to_path)
|
||||
|
|
|
@ -620,7 +620,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
|
|||
// the children of the visible parent (as was done when computing
|
||||
// `visible_parent_map`), looking for the specific child we currently have and then
|
||||
// have access to the re-exported name.
|
||||
DefPathData::TypeNs(Some(ref mut name)) if Some(visible_parent) != actual_parent => {
|
||||
DefPathData::TypeNs(ref mut name) if Some(visible_parent) != actual_parent => {
|
||||
// Item might be re-exported several times, but filter for the one
|
||||
// that's public and whose identifier isn't `_`.
|
||||
let reexport = self
|
||||
|
@ -641,7 +641,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
|
|||
}
|
||||
// Re-exported `extern crate` (#43189).
|
||||
DefPathData::CrateRoot => {
|
||||
data = DefPathData::TypeNs(Some(self.tcx().crate_name(def_id.krate)));
|
||||
data = DefPathData::TypeNs(self.tcx().crate_name(def_id.krate));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
|
|
@ -26,7 +26,7 @@ fn true_significant_drop_ty<'tcx>(
|
|||
name_rev.push(tcx.crate_name(did.krate));
|
||||
}
|
||||
rustc_hir::definitions::DefPathData::TypeNs(symbol) => {
|
||||
name_rev.push(symbol.unwrap());
|
||||
name_rev.push(symbol);
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
|
|
|
@ -39,26 +39,26 @@ impl<'tcx> crate::MirPass<'tcx> for InstSimplify {
|
|||
attr::contains_name(tcx.hir_krate_attrs(), sym::rustc_preserve_ub_checks);
|
||||
for block in body.basic_blocks.as_mut() {
|
||||
for statement in block.statements.iter_mut() {
|
||||
match statement.kind {
|
||||
StatementKind::Assign(box (_place, ref mut rvalue)) => {
|
||||
if !preserve_ub_checks {
|
||||
ctx.simplify_ub_check(rvalue);
|
||||
}
|
||||
ctx.simplify_bool_cmp(rvalue);
|
||||
ctx.simplify_ref_deref(rvalue);
|
||||
ctx.simplify_ptr_aggregate(rvalue);
|
||||
ctx.simplify_cast(rvalue);
|
||||
ctx.simplify_repeated_aggregate(rvalue);
|
||||
ctx.simplify_repeat_once(rvalue);
|
||||
}
|
||||
_ => {}
|
||||
let StatementKind::Assign(box (.., rvalue)) = &mut statement.kind else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if !preserve_ub_checks {
|
||||
ctx.simplify_ub_check(rvalue);
|
||||
}
|
||||
ctx.simplify_bool_cmp(rvalue);
|
||||
ctx.simplify_ref_deref(rvalue);
|
||||
ctx.simplify_ptr_aggregate(rvalue);
|
||||
ctx.simplify_cast(rvalue);
|
||||
ctx.simplify_repeated_aggregate(rvalue);
|
||||
ctx.simplify_repeat_once(rvalue);
|
||||
}
|
||||
|
||||
ctx.simplify_primitive_clone(block.terminator.as_mut().unwrap(), &mut block.statements);
|
||||
ctx.simplify_intrinsic_assert(block.terminator.as_mut().unwrap());
|
||||
ctx.simplify_nounwind_call(block.terminator.as_mut().unwrap());
|
||||
simplify_duplicate_switch_targets(block.terminator.as_mut().unwrap());
|
||||
let terminator = block.terminator.as_mut().unwrap();
|
||||
ctx.simplify_primitive_clone(terminator, &mut block.statements);
|
||||
ctx.simplify_intrinsic_assert(terminator);
|
||||
ctx.simplify_nounwind_call(terminator);
|
||||
simplify_duplicate_switch_targets(terminator);
|
||||
}
|
||||
}
|
||||
|
||||
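The loop body was flattened with `let ... else`, which keeps the common case unindented and bails out of the iteration otherwise. A minimal standalone sketch of the same shape (toy `Stmt` type, not MIR):

    enum Stmt {
        Assign(i32),
        Nop,
    }

    fn main() {
        let stmts = [Stmt::Assign(1), Stmt::Nop, Stmt::Assign(2)];
        let mut sum = 0;
        for s in &stmts {
            // Instead of `match s { Stmt::Assign(v) => ..., _ => {} }`,
            // skip this iteration early and keep the happy path unindented.
            let Stmt::Assign(v) = s else { continue };
            sum += v;
        }
        assert_eq!(sum, 3);
    }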
|
@ -105,43 +105,34 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
|
|||
|
||||
/// Transform boolean comparisons into logical operations.
|
||||
fn simplify_bool_cmp(&self, rvalue: &mut Rvalue<'tcx>) {
|
||||
match rvalue {
|
||||
Rvalue::BinaryOp(op @ (BinOp::Eq | BinOp::Ne), box (a, b)) => {
|
||||
let new = match (op, self.try_eval_bool(a), self.try_eval_bool(b)) {
|
||||
// Transform "Eq(a, true)" ==> "a"
|
||||
(BinOp::Eq, _, Some(true)) => Some(Rvalue::Use(a.clone())),
|
||||
let Rvalue::BinaryOp(op @ (BinOp::Eq | BinOp::Ne), box (a, b)) = &*rvalue else { return };
|
||||
*rvalue = match (op, self.try_eval_bool(a), self.try_eval_bool(b)) {
|
||||
// Transform "Eq(a, true)" ==> "a"
|
||||
(BinOp::Eq, _, Some(true)) => Rvalue::Use(a.clone()),
|
||||
|
||||
// Transform "Ne(a, false)" ==> "a"
|
||||
(BinOp::Ne, _, Some(false)) => Some(Rvalue::Use(a.clone())),
|
||||
// Transform "Ne(a, false)" ==> "a"
|
||||
(BinOp::Ne, _, Some(false)) => Rvalue::Use(a.clone()),
|
||||
|
||||
// Transform "Eq(true, b)" ==> "b"
|
||||
(BinOp::Eq, Some(true), _) => Some(Rvalue::Use(b.clone())),
|
||||
// Transform "Eq(true, b)" ==> "b"
|
||||
(BinOp::Eq, Some(true), _) => Rvalue::Use(b.clone()),
|
||||
|
||||
// Transform "Ne(false, b)" ==> "b"
|
||||
(BinOp::Ne, Some(false), _) => Some(Rvalue::Use(b.clone())),
|
||||
// Transform "Ne(false, b)" ==> "b"
|
||||
(BinOp::Ne, Some(false), _) => Rvalue::Use(b.clone()),
|
||||
|
||||
// Transform "Eq(false, b)" ==> "Not(b)"
|
||||
(BinOp::Eq, Some(false), _) => Some(Rvalue::UnaryOp(UnOp::Not, b.clone())),
|
||||
// Transform "Eq(false, b)" ==> "Not(b)"
|
||||
(BinOp::Eq, Some(false), _) => Rvalue::UnaryOp(UnOp::Not, b.clone()),
|
||||
|
||||
// Transform "Ne(true, b)" ==> "Not(b)"
|
||||
(BinOp::Ne, Some(true), _) => Some(Rvalue::UnaryOp(UnOp::Not, b.clone())),
|
||||
// Transform "Ne(true, b)" ==> "Not(b)"
|
||||
(BinOp::Ne, Some(true), _) => Rvalue::UnaryOp(UnOp::Not, b.clone()),
|
||||
|
||||
// Transform "Eq(a, false)" ==> "Not(a)"
|
||||
(BinOp::Eq, _, Some(false)) => Some(Rvalue::UnaryOp(UnOp::Not, a.clone())),
|
||||
// Transform "Eq(a, false)" ==> "Not(a)"
|
||||
(BinOp::Eq, _, Some(false)) => Rvalue::UnaryOp(UnOp::Not, a.clone()),
|
||||
|
||||
// Transform "Ne(a, true)" ==> "Not(a)"
|
||||
(BinOp::Ne, _, Some(true)) => Some(Rvalue::UnaryOp(UnOp::Not, a.clone())),
|
||||
// Transform "Ne(a, true)" ==> "Not(a)"
|
||||
(BinOp::Ne, _, Some(true)) => Rvalue::UnaryOp(UnOp::Not, a.clone()),
|
||||
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(new) = new {
|
||||
*rvalue = new;
|
||||
}
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
_ => return,
|
||||
};
|
||||
}
|
||||
|
||||
fn try_eval_bool(&self, a: &Operand<'_>) -> Option<bool> {
|
||||
|
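These rewrites are a small peephole pass over boolean comparisons. A standalone model of the `Eq` half of the rules on a toy expression type (plain Rust rather than MIR; `Expr` and `simplify_eq` are invented for the sketch):

    #[derive(Clone, Debug, PartialEq)]
    enum Expr {
        Var(&'static str),
        Bool(bool),
        Eq(Box<Expr>, Box<Expr>),
        Not(Box<Expr>),
    }

    // Comparing with a known boolean constant collapses to the other operand,
    // possibly negated, just like `simplify_bool_cmp` does for MIR rvalues.
    fn simplify_eq(e: Expr) -> Expr {
        match e {
            Expr::Eq(a, b) => match (*a, *b) {
                (x, Expr::Bool(true)) | (Expr::Bool(true), x) => x,
                (x, Expr::Bool(false)) | (Expr::Bool(false), x) => Expr::Not(Box::new(x)),
                (a, b) => Expr::Eq(Box::new(a), Box::new(b)),
            },
            other => other,
        }
    }

    fn main() {
        let e = Expr::Eq(Box::new(Expr::Var("a")), Box::new(Expr::Bool(true)));
        assert_eq!(simplify_eq(e), Expr::Var("a")); // Eq(a, true) ==> a
        let e = Expr::Eq(Box::new(Expr::Bool(false)), Box::new(Expr::Var("b")));
        assert_eq!(simplify_eq(e), Expr::Not(Box::new(Expr::Var("b")))); // Eq(false, b) ==> Not(b)
    }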
@ -151,64 +142,58 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
|
|||
|
||||
/// Transform `&(*a)` ==> `a`.
|
||||
fn simplify_ref_deref(&self, rvalue: &mut Rvalue<'tcx>) {
|
||||
if let Rvalue::Ref(_, _, place) | Rvalue::RawPtr(_, place) = rvalue {
|
||||
if let Some((base, ProjectionElem::Deref)) = place.as_ref().last_projection() {
|
||||
if rvalue.ty(self.local_decls, self.tcx) != base.ty(self.local_decls, self.tcx).ty {
|
||||
return;
|
||||
}
|
||||
|
||||
*rvalue = Rvalue::Use(Operand::Copy(Place {
|
||||
local: base.local,
|
||||
projection: self.tcx.mk_place_elems(base.projection),
|
||||
}));
|
||||
}
|
||||
if let Rvalue::Ref(_, _, place) | Rvalue::RawPtr(_, place) = rvalue
|
||||
&& let Some((base, ProjectionElem::Deref)) = place.as_ref().last_projection()
|
||||
&& rvalue.ty(self.local_decls, self.tcx) == base.ty(self.local_decls, self.tcx).ty
|
||||
{
|
||||
*rvalue = Rvalue::Use(Operand::Copy(Place {
|
||||
local: base.local,
|
||||
projection: self.tcx.mk_place_elems(base.projection),
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
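The `&(*a)` ==> `a` rewrite (guarded by the type check above) reflects a simple identity that can be observed directly in plain Rust:

    fn main() {
        let x = 5i32;
        let a: &i32 = &x;
        let b: &i32 = &*a; // `&(*a)` denotes the same borrow as `a`
        assert!(std::ptr::eq(a, b));
    }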
||||
/// Transform `Aggregate(RawPtr, [p, ()])` ==> `Cast(PtrToPtr, p)`.
|
||||
fn simplify_ptr_aggregate(&self, rvalue: &mut Rvalue<'tcx>) {
|
||||
if let Rvalue::Aggregate(box AggregateKind::RawPtr(pointee_ty, mutability), fields) = rvalue
|
||||
&& let meta_ty = fields.raw[1].ty(self.local_decls, self.tcx)
|
||||
&& meta_ty.is_unit()
|
||||
{
|
||||
let meta_ty = fields.raw[1].ty(self.local_decls, self.tcx);
|
||||
if meta_ty.is_unit() {
|
||||
// The mutable borrows we're holding prevent printing `rvalue` here
|
||||
let mut fields = std::mem::take(fields);
|
||||
let _meta = fields.pop().unwrap();
|
||||
let data = fields.pop().unwrap();
|
||||
let ptr_ty = Ty::new_ptr(self.tcx, *pointee_ty, *mutability);
|
||||
*rvalue = Rvalue::Cast(CastKind::PtrToPtr, data, ptr_ty);
|
||||
}
|
||||
// The mutable borrows we're holding prevent printing `rvalue` here
|
||||
let mut fields = std::mem::take(fields);
|
||||
let _meta = fields.pop().unwrap();
|
||||
let data = fields.pop().unwrap();
|
||||
let ptr_ty = Ty::new_ptr(self.tcx, *pointee_ty, *mutability);
|
||||
*rvalue = Rvalue::Cast(CastKind::PtrToPtr, data, ptr_ty);
|
||||
}
|
||||
}
|
||||
|
||||
fn simplify_ub_check(&self, rvalue: &mut Rvalue<'tcx>) {
|
||||
if let Rvalue::NullaryOp(NullOp::UbChecks, _) = *rvalue {
|
||||
let const_ = Const::from_bool(self.tcx, self.tcx.sess.ub_checks());
|
||||
let constant = ConstOperand { span: DUMMY_SP, const_, user_ty: None };
|
||||
*rvalue = Rvalue::Use(Operand::Constant(Box::new(constant)));
|
||||
}
|
||||
let Rvalue::NullaryOp(NullOp::UbChecks, _) = *rvalue else { return };
|
||||
|
||||
let const_ = Const::from_bool(self.tcx, self.tcx.sess.ub_checks());
|
||||
let constant = ConstOperand { span: DUMMY_SP, const_, user_ty: None };
|
||||
*rvalue = Rvalue::Use(Operand::Constant(Box::new(constant)));
|
||||
}
|
||||
|
||||
fn simplify_cast(&self, rvalue: &mut Rvalue<'tcx>) {
|
||||
if let Rvalue::Cast(kind, operand, cast_ty) = rvalue {
|
||||
let operand_ty = operand.ty(self.local_decls, self.tcx);
|
||||
if operand_ty == *cast_ty {
|
||||
*rvalue = Rvalue::Use(operand.clone());
|
||||
} else if *kind == CastKind::Transmute {
|
||||
// Transmuting an integer to another integer is just a signedness cast
|
||||
if let (ty::Int(int), ty::Uint(uint)) | (ty::Uint(uint), ty::Int(int)) =
|
||||
(operand_ty.kind(), cast_ty.kind())
|
||||
&& int.bit_width() == uint.bit_width()
|
||||
{
|
||||
// The width check isn't strictly necessary, as different widths
|
||||
// are UB and thus we'd be allowed to turn it into a cast anyway.
|
||||
// But let's keep the UB around for codegen to exploit later.
|
||||
// (If `CastKind::Transmute` ever becomes *not* UB for mismatched sizes,
|
||||
// then the width check is necessary for big-endian correctness.)
|
||||
*kind = CastKind::IntToInt;
|
||||
return;
|
||||
}
|
||||
}
|
||||
let Rvalue::Cast(kind, operand, cast_ty) = rvalue else { return };
|
||||
|
||||
let operand_ty = operand.ty(self.local_decls, self.tcx);
|
||||
if operand_ty == *cast_ty {
|
||||
*rvalue = Rvalue::Use(operand.clone());
|
||||
} else if *kind == CastKind::Transmute
|
||||
// Transmuting an integer to another integer is just a signedness cast
|
||||
&& let (ty::Int(int), ty::Uint(uint)) | (ty::Uint(uint), ty::Int(int)) =
|
||||
(operand_ty.kind(), cast_ty.kind())
|
||||
&& int.bit_width() == uint.bit_width()
|
||||
{
|
||||
// The width check isn't strictly necessary, as different widths
|
||||
// are UB and thus we'd be allowed to turn it into a cast anyway.
|
||||
// But let's keep the UB around for codegen to exploit later.
|
||||
// (If `CastKind::Transmute` ever becomes *not* UB for mismatched sizes,
|
||||
// then the width check is necessary for big-endian correctness.)
|
||||
*kind = CastKind::IntToInt;
|
||||
}
|
||||
}
|
||||
|
||||
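The `Transmute` to `IntToInt` rewrite rests on the fact that a same-width integer transmute is exactly a signedness cast, which is easy to check in plain Rust:

    fn main() {
        let x: i32 = -1;
        // Reinterpreting the bits (what a transmute does) gives the same value
        // as the ordinary `as` cast when the widths match.
        assert_eq!(u32::from_ne_bytes(x.to_ne_bytes()), x as u32); // both are 4294967295
    }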
|
@ -277,7 +262,7 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
|
|||
}
|
||||
|
||||
fn simplify_nounwind_call(&self, terminator: &mut Terminator<'tcx>) {
|
||||
let TerminatorKind::Call { func, unwind, .. } = &mut terminator.kind else {
|
||||
let TerminatorKind::Call { ref func, ref mut unwind, .. } = terminator.kind else {
|
||||
return;
|
||||
};
|
||||
|
||||
|
@ -290,7 +275,7 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
|
|||
ty::FnDef(..) => body_ty.fn_sig(self.tcx).abi(),
|
||||
ty::Closure(..) => ExternAbi::RustCall,
|
||||
ty::Coroutine(..) => ExternAbi::Rust,
|
||||
_ => bug!("unexpected body ty: {:?}", body_ty),
|
||||
_ => bug!("unexpected body ty: {body_ty:?}"),
|
||||
};
|
||||
|
||||
if !layout::fn_can_unwind(self.tcx, Some(def_id), body_abi) {
|
||||
|
@ -299,10 +284,9 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
|
|||
}
|
||||
|
||||
fn simplify_intrinsic_assert(&self, terminator: &mut Terminator<'tcx>) {
|
||||
let TerminatorKind::Call { func, target, .. } = &mut terminator.kind else {
|
||||
return;
|
||||
};
|
||||
let Some(target_block) = target else {
|
||||
let TerminatorKind::Call { ref func, target: ref mut target @ Some(target_block), .. } =
|
||||
terminator.kind
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let func_ty = func.ty(self.local_decls, self.tcx);
|
||||
|
@ -310,12 +294,10 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
|
|||
return;
|
||||
};
|
||||
// The intrinsics we are interested in have one generic parameter
|
||||
if args.is_empty() {
|
||||
return;
|
||||
}
|
||||
let [arg, ..] = args[..] else { return };
|
||||
|
||||
let known_is_valid =
|
||||
intrinsic_assert_panics(self.tcx, self.typing_env, args[0], intrinsic_name);
|
||||
intrinsic_assert_panics(self.tcx, self.typing_env, arg, intrinsic_name);
|
||||
match known_is_valid {
|
||||
// We don't know the layout, or it's not a validity assertion at all; don't touch it
|
||||
None => {}
|
||||
|
@ -325,7 +307,7 @@ impl<'tcx> InstSimplifyContext<'_, 'tcx> {
|
|||
}
|
||||
Some(false) => {
|
||||
// If we know the assert does not panic, turn the call into a Goto
|
||||
terminator.kind = TerminatorKind::Goto { target: *target_block };
|
||||
terminator.kind = TerminatorKind::Goto { target: target_block };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -346,9 +328,7 @@ fn resolve_rust_intrinsic<'tcx>(
|
|||
tcx: TyCtxt<'tcx>,
|
||||
func_ty: Ty<'tcx>,
|
||||
) -> Option<(Symbol, GenericArgsRef<'tcx>)> {
|
||||
if let ty::FnDef(def_id, args) = *func_ty.kind() {
|
||||
let intrinsic = tcx.intrinsic(def_id)?;
|
||||
return Some((intrinsic.name, args));
|
||||
}
|
||||
None
|
||||
let ty::FnDef(def_id, args) = *func_ty.kind() else { return None };
|
||||
let intrinsic = tcx.intrinsic(def_id)?;
|
||||
Some((intrinsic.name, args))
|
||||
}
|
||||
|
|
|
@ -1689,7 +1689,7 @@ pub(crate) fn collect_crate_mono_items<'tcx>(
|
|||
let mut recursion_depths = DefIdMap::default();
|
||||
collect_items_rec(
|
||||
tcx,
|
||||
dummy_spanned(root),
|
||||
dummy_spanned(*root),
|
||||
&state,
|
||||
&mut recursion_depths,
|
||||
recursion_limit,
|
||||
|
|
|
@ -191,7 +191,7 @@ impl<'tcx> MoveCheckVisitor<'tcx> {
|
|||
|
||||
fn assoc_fn_of_type<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, fn_ident: Ident) -> Option<DefId> {
|
||||
for impl_def_id in tcx.inherent_impls(def_id) {
|
||||
if let Some(new) = tcx.associated_items(impl_def_id).find_by_name_and_kind(
|
||||
if let Some(new) = tcx.associated_items(impl_def_id).find_by_ident_and_kind(
|
||||
tcx,
|
||||
fn_ident,
|
||||
AssocKind::Fn,
|
||||
|
|
|
@ -2166,10 +2166,15 @@ impl<'a> Parser<'a> {
|
|||
let expr = self
|
||||
.eat_metavar_seq(mv_kind, |this| this.parse_expr())
|
||||
.expect("metavar seq expr");
|
||||
let ast::ExprKind::Lit(token_lit) = expr.kind else {
|
||||
panic!("didn't reparse an expr");
|
||||
};
|
||||
Some(token_lit)
|
||||
if let ast::ExprKind::Lit(token_lit) = expr.kind {
|
||||
Some(token_lit)
|
||||
} else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind
|
||||
&& let ast::Expr { kind: ast::ExprKind::Lit(_), .. } = **inner
|
||||
{
|
||||
None
|
||||
} else {
|
||||
panic!("unexpected reparsed expr: {:?}", expr.kind);
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
|
|
|
@ -602,21 +602,13 @@ impl<'a> Parser<'a> {
|
|||
let polarity = self.parse_polarity();
|
||||
|
||||
// Parse both types and traits as a type, then reinterpret if necessary.
|
||||
let err_path = |span| ast::Path::from_ident(Ident::new(kw::Empty, span));
|
||||
let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
|
||||
{
|
||||
let span = self.prev_token.span.between(self.token.span);
|
||||
self.dcx().emit_err(errors::MissingTraitInTraitImpl {
|
||||
return Err(self.dcx().create_err(errors::MissingTraitInTraitImpl {
|
||||
span,
|
||||
for_span: span.to(self.token.span),
|
||||
});
|
||||
|
||||
P(Ty {
|
||||
kind: TyKind::Path(None, err_path(span)),
|
||||
span,
|
||||
id: DUMMY_NODE_ID,
|
||||
tokens: None,
|
||||
})
|
||||
}));
|
||||
} else {
|
||||
self.parse_ty_with_generics_recovery(&generics)?
|
||||
};
|
||||
|
@ -657,6 +649,7 @@ impl<'a> Parser<'a> {
|
|||
other => {
|
||||
if let TyKind::ImplTrait(_, bounds) = other
|
||||
&& let [bound] = bounds.as_slice()
|
||||
&& let GenericBound::Trait(poly_trait_ref) = bound
|
||||
{
|
||||
// Suggest removing extra `impl` keyword:
|
||||
// `impl<T: Default> impl Default for Wrapper<T>`
|
||||
|
@ -666,12 +659,12 @@ impl<'a> Parser<'a> {
|
|||
extra_impl_kw,
|
||||
impl_trait_span: ty_first.span,
|
||||
});
|
||||
poly_trait_ref.trait_ref.path.clone()
|
||||
} else {
|
||||
self.dcx().emit_err(errors::ExpectedTraitInTraitImplFoundType {
|
||||
span: ty_first.span,
|
||||
});
|
||||
return Err(self.dcx().create_err(
|
||||
errors::ExpectedTraitInTraitImplFoundType { span: ty_first.span },
|
||||
));
|
||||
}
|
||||
err_path(ty_first.span)
|
||||
}
|
||||
};
|
||||
let trait_ref = TraitRef { path, ref_id: ty_first.id };
|
||||
|
|
|
@ -141,7 +141,7 @@ impl<D: Deps> DepGraph<D> {
|
|||
let colors = DepNodeColorMap::new(prev_graph_node_count);
|
||||
|
||||
// Instantiate a node with zero dependencies only once for anonymous queries.
|
||||
let _green_node_index = current.alloc_node(
|
||||
let _green_node_index = current.alloc_new_node(
|
||||
DepNode { kind: D::DEP_KIND_ANON_ZERO_DEPS, hash: current.anon_id_seed.into() },
|
||||
EdgesVec::new(),
|
||||
Fingerprint::ZERO,
|
||||
|
@ -149,7 +149,7 @@ impl<D: Deps> DepGraph<D> {
|
|||
assert_eq!(_green_node_index, DepNodeIndex::SINGLETON_ZERO_DEPS_ANON_NODE);
|
||||
|
||||
// Instantiate a dependency-less red node only once for anonymous queries.
|
||||
let red_node_index = current.alloc_node(
|
||||
let red_node_index = current.alloc_new_node(
|
||||
DepNode { kind: D::DEP_KIND_RED, hash: Fingerprint::ZERO.into() },
|
||||
EdgesVec::new(),
|
||||
Fingerprint::ZERO,
|
||||
|
@ -438,7 +438,7 @@ impl<D: Deps> DepGraphData<D> {
|
|||
// memory impact of this `anon_node_to_index` map remains tolerable, and helps
|
||||
// us avoid useless growth of the graph with almost-equivalent nodes.
|
||||
self.current.anon_node_to_index.get_or_insert_with(target_dep_node, || {
|
||||
self.current.alloc_node(target_dep_node, task_deps, Fingerprint::ZERO)
|
||||
self.current.alloc_new_node(target_dep_node, task_deps, Fingerprint::ZERO)
|
||||
})
|
||||
}
|
||||
};
|
||||
|
@ -680,8 +680,8 @@ impl<D: Deps> DepGraphData<D> {
|
|||
qcx: Qcx,
|
||||
diagnostic: &DiagInner,
|
||||
) -> DepNodeIndex {
|
||||
// Use `send` so we get an unique index, even though the dep node is not.
|
||||
let dep_node_index = self.current.encoder.send(
|
||||
// Use `send_new` so we get a unique index, even though the dep node is not.
|
||||
let dep_node_index = self.current.encoder.send_new(
|
||||
DepNode {
|
||||
kind: D::DEP_KIND_SIDE_EFFECT,
|
||||
hash: PackedFingerprint::from(Fingerprint::ZERO),
|
||||
|
@ -713,20 +713,22 @@ impl<D: Deps> DepGraphData<D> {
|
|||
}
|
||||
}
|
||||
|
||||
// Manually recreate the node as `promote_node_and_deps_to_current` expects all
|
||||
// green dependencies.
|
||||
let dep_node_index = self.current.encoder.send(
|
||||
// Use `send_and_color` as `promote_node_and_deps_to_current` expects all
|
||||
// green dependencies. `send_and_color` will also prevent multiple nodes
|
||||
// being encoded for concurrent calls.
|
||||
let dep_node_index = self.current.encoder.send_and_color(
|
||||
prev_index,
|
||||
&self.colors,
|
||||
DepNode {
|
||||
kind: D::DEP_KIND_SIDE_EFFECT,
|
||||
hash: PackedFingerprint::from(Fingerprint::ZERO),
|
||||
},
|
||||
Fingerprint::ZERO,
|
||||
std::iter::once(DepNodeIndex::FOREVER_RED_NODE).collect(),
|
||||
true,
|
||||
);
|
||||
// This will just overwrite the same value for concurrent calls.
|
||||
qcx.store_side_effect(dep_node_index, side_effect);
|
||||
|
||||
// Mark the node as green.
|
||||
self.colors.insert(prev_index, DepNodeColor::Green(dep_node_index));
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -736,38 +738,43 @@ impl<D: Deps> DepGraphData<D> {
|
|||
edges: EdgesVec,
|
||||
fingerprint: Option<Fingerprint>,
|
||||
) -> DepNodeIndex {
|
||||
let dep_node_index =
|
||||
self.current.alloc_node(key, edges, fingerprint.unwrap_or(Fingerprint::ZERO));
|
||||
|
||||
if let Some(prev_index) = self.previous.node_to_index_opt(&key) {
|
||||
// Determine the color and index of the new `DepNode`.
|
||||
let color = if let Some(fingerprint) = fingerprint {
|
||||
let is_green = if let Some(fingerprint) = fingerprint {
|
||||
if fingerprint == self.previous.fingerprint_by_index(prev_index) {
|
||||
// This is a green node: it existed in the previous compilation,
|
||||
// its query was re-executed, and it has the same result as before.
|
||||
DepNodeColor::Green(dep_node_index)
|
||||
true
|
||||
} else {
|
||||
// This is a red node: it existed in the previous compilation, its query
|
||||
// was re-executed, but it has a different result from before.
|
||||
DepNodeColor::Red
|
||||
false
|
||||
}
|
||||
} else {
|
||||
// This is a red node, effectively: it existed in the previous compilation
|
||||
// session, its query was re-executed, but it doesn't compute a result hash
|
||||
// (i.e. it represents a `no_hash` query), so we have no way of determining
|
||||
// whether or not the result was the same as before.
|
||||
DepNodeColor::Red
|
||||
false
|
||||
};
|
||||
|
||||
debug_assert!(
|
||||
self.colors.get(prev_index).is_none(),
|
||||
"DepGraph::with_task() - Duplicate DepNodeColor insertion for {key:?}",
|
||||
let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO);
|
||||
|
||||
let dep_node_index = self.current.encoder.send_and_color(
|
||||
prev_index,
|
||||
&self.colors,
|
||||
key,
|
||||
fingerprint,
|
||||
edges,
|
||||
is_green,
|
||||
);
|
||||
|
||||
self.colors.insert(prev_index, color);
|
||||
}
|
||||
self.current.record_node(dep_node_index, key, fingerprint);
|
||||
|
||||
dep_node_index
|
||||
dep_node_index
|
||||
} else {
|
||||
self.current.alloc_new_node(key, edges, fingerprint.unwrap_or(Fingerprint::ZERO))
|
||||
}
|
||||
}
|
||||
|
||||
fn promote_node_and_deps_to_current(&self, prev_index: SerializedDepNodeIndex) -> DepNodeIndex {
|
||||
|
@ -1246,19 +1253,15 @@ impl<D: Deps> CurrentDepGraph<D> {
|
|||
assert_eq!(previous, fingerprint, "Unstable fingerprints for {:?}", key);
|
||||
}
|
||||
|
||||
/// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
|
||||
/// Assumes that this is a node that has no equivalent in the previous dep-graph.
|
||||
#[inline(always)]
|
||||
fn alloc_node(
|
||||
fn record_node(
|
||||
&self,
|
||||
dep_node_index: DepNodeIndex,
|
||||
key: DepNode,
|
||||
edges: EdgesVec,
|
||||
current_fingerprint: Fingerprint,
|
||||
) -> DepNodeIndex {
|
||||
let dep_node_index = self.encoder.send(key, current_fingerprint, edges);
|
||||
|
||||
_current_fingerprint: Fingerprint,
|
||||
) {
|
||||
#[cfg(debug_assertions)]
|
||||
self.record_edge(dep_node_index, key, current_fingerprint);
|
||||
self.record_edge(dep_node_index, key, _current_fingerprint);
|
||||
|
||||
if let Some(ref nodes_in_current_session) = self.nodes_in_current_session {
|
||||
outline(|| {
|
||||
|
@ -1267,6 +1270,20 @@ impl<D: Deps> CurrentDepGraph<D> {
|
|||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
|
||||
/// Assumes that this is a node that has no equivalent in the previous dep-graph.
|
||||
#[inline(always)]
|
||||
fn alloc_new_node(
|
||||
&self,
|
||||
key: DepNode,
|
||||
edges: EdgesVec,
|
||||
current_fingerprint: Fingerprint,
|
||||
) -> DepNodeIndex {
|
||||
let dep_node_index = self.encoder.send_new(key, current_fingerprint, edges);
|
||||
|
||||
self.record_node(dep_node_index, key, current_fingerprint);
|
||||
|
||||
dep_node_index
|
||||
}
|
||||
|
|
|
@ -707,7 +707,8 @@ impl<D: Deps> GraphEncoder<D> {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn send(
|
||||
/// Encodes a node that does not exist in the previous graph.
|
||||
pub(crate) fn send_new(
|
||||
&self,
|
||||
node: DepNode,
|
||||
fingerprint: Fingerprint,
|
||||
|
@ -718,6 +719,40 @@ impl<D: Deps> GraphEncoder<D> {
|
|||
self.status.lock().as_mut().unwrap().encode_node(&node, &self.record_graph)
|
||||
}
|
||||
|
||||
/// Encodes a node that exists in the previous graph, but was re-executed.
|
||||
///
|
||||
/// This will also ensure the dep node is colored either red or green.
|
||||
pub(crate) fn send_and_color(
|
||||
&self,
|
||||
prev_index: SerializedDepNodeIndex,
|
||||
colors: &DepNodeColorMap,
|
||||
node: DepNode,
|
||||
fingerprint: Fingerprint,
|
||||
edges: EdgesVec,
|
||||
is_green: bool,
|
||||
) -> DepNodeIndex {
|
||||
let _prof_timer = self.profiler.generic_activity("incr_comp_encode_dep_graph");
|
||||
let node = NodeInfo { node, fingerprint, edges };
|
||||
|
||||
let mut status = self.status.lock();
|
||||
let status = status.as_mut().unwrap();
|
||||
|
||||
// Check colors inside the lock to avoid racing when `send_promoted` is called concurrently
|
||||
// on the same index.
|
||||
match colors.get(prev_index) {
|
||||
None => {
|
||||
let dep_node_index = status.encode_node(&node, &self.record_graph);
|
||||
colors.insert(
|
||||
prev_index,
|
||||
if is_green { DepNodeColor::Green(dep_node_index) } else { DepNodeColor::Red },
|
||||
);
|
||||
dep_node_index
|
||||
}
|
||||
Some(DepNodeColor::Green(dep_node_index)) => dep_node_index,
|
||||
Some(DepNodeColor::Red) => panic!(),
|
||||
}
|
||||
}
|
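Checking the color map while holding the encoder lock is a get-or-insert-under-a-lock pattern: whichever caller wins encodes the node, and later callers reuse its index. A rough standalone model with std types only (the `Colors` type here is invented, not the real `DepNodeColorMap`):

    use std::collections::HashMap;
    use std::sync::Mutex;

    struct Colors {
        // prev_index -> index of the node that was already encoded for it
        map: Mutex<HashMap<u32, u32>>,
    }

    impl Colors {
        // Returns the existing index if some thread got here first; otherwise
        // runs `encode` exactly once and records its result.
        fn get_or_encode(&self, prev_index: u32, encode: impl FnOnce() -> u32) -> u32 {
            let mut map = self.map.lock().unwrap();
            *map.entry(prev_index).or_insert_with(encode)
        }
    }

    fn main() {
        let colors = Colors { map: Mutex::new(HashMap::new()) };
        let a = colors.get_or_encode(7, || 100);
        let b = colors.get_or_encode(7, || 200); // second caller reuses the first result
        assert_eq!((a, b), (100, 100));
    }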
||||
|
||||
/// Encodes a node that was promoted from the previous graph. It reads the information directly from
|
||||
/// the previous dep graph and expects all edges to already have a new dep node index assigned.
|
||||
///
|
||||
|
@ -733,8 +768,8 @@ impl<D: Deps> GraphEncoder<D> {
|
|||
let mut status = self.status.lock();
|
||||
let status = status.as_mut().unwrap();
|
||||
|
||||
// Check colors inside the lock to avoid racing when `send_promoted` is called concurrently
|
||||
// on the same index.
|
||||
// Check colors inside the lock to avoid racing when `send_promoted` or `send_and_color`
|
||||
// is called concurrently on the same index.
|
||||
match colors.get(prev_index) {
|
||||
None => {
|
||||
let dep_node_index =
|
||||
|
|
|
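The `send_and_color` path above decides a previously known node's color while holding the encoder lock: if no color is recorded yet it encodes the node and marks it green or red, and if the node is already green it reuses the recorded index (a red node reaching it again panics). A simplified, self-contained sketch of that decision follows; the types and names (`Encoder`, `Color`, `PrevIndex`) are illustrative stand-ins, not the compiler's `GraphEncoder` or `DepNodeColorMap`.

// --- illustrative sketch, not part of the diff ---
use std::collections::HashMap;
use std::sync::Mutex;

// Hypothetical stand-ins for the compiler's index and color types.
type PrevIndex = u32;
type NewIndex = u32;

#[derive(Clone, Copy, Debug, PartialEq)]
enum Color {
    Red,
    Green(NewIndex),
}

struct Encoder {
    // Colors are checked and inserted while holding the lock, so two threads
    // racing on the same `prev_index` cannot both encode the node.
    colors: Mutex<HashMap<PrevIndex, Color>>,
    next_index: Mutex<NewIndex>,
}

impl Encoder {
    fn encode_node(next: &mut NewIndex) -> NewIndex {
        let idx = *next;
        *next += 1;
        idx
    }

    fn send_and_color(&self, prev_index: PrevIndex, is_green: bool) -> Option<NewIndex> {
        let mut colors = self.colors.lock().unwrap();
        match colors.get(&prev_index).copied() {
            None => {
                let idx = Self::encode_node(&mut self.next_index.lock().unwrap());
                colors.insert(prev_index, if is_green { Color::Green(idx) } else { Color::Red });
                Some(idx)
            }
            // Another caller already recorded this node as green: reuse its index.
            Some(Color::Green(idx)) => Some(idx),
            // The real encoder panics here; the sketch just signals it with None.
            Some(Color::Red) => None,
        }
    }
}

fn main() {
    let enc = Encoder { colors: Mutex::new(HashMap::new()), next_index: Mutex::new(0) };
    let a = enc.send_and_color(7, true);
    let b = enc.send_and_color(7, true); // second call reuses the recorded index
    assert_eq!(a, b);
}
// --- end sketch ---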
@ -18,7 +18,6 @@ rustc_expand = { path = "../rustc_expand" }
rustc_feature = { path = "../rustc_feature" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }
rustc_macros = { path = "../rustc_macros" }
rustc_metadata = { path = "../rustc_metadata" }
rustc_middle = { path = "../rustc_middle" }
@ -1207,7 +1207,7 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
        for (rule_i, rule_span) in &self.r.macro_map[&def_id.to_def_id()].rule_spans {
            self.r
                .unused_macro_rules
                .entry(def_id)
                .entry(node_id)
                .or_default()
                .insert(*rule_i, (ident, *rule_span));
        }
@ -170,10 +170,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {

    fn report_with_use_injections(&mut self, krate: &Crate) {
        for UseError { mut err, candidates, def_id, instead, suggestion, path, is_call } in
            self.use_injections.drain(..)
            std::mem::take(&mut self.use_injections)
        {
            let (span, found_use) = if let Some(def_id) = def_id.as_local() {
                UsePlacementFinder::check(krate, self.def_id_to_node_id[def_id])
                UsePlacementFinder::check(krate, self.def_id_to_node_id(def_id))
            } else {
                (None, FoundUse::No)
            };
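The switch from `self.use_injections.drain(..)` to `std::mem::take(&mut self.use_injections)` matters because the loop body now calls `self.def_id_to_node_id(..)`, which needs to borrow `self` while a `drain` iterator would still hold a mutable borrow of the field. A minimal sketch of that borrow-splitting pattern follows, using made-up types (`Collector`, `describe`) that have nothing to do with the resolver.

// --- illustrative sketch, not part of the diff ---
use std::mem;

struct Collector {
    pending: Vec<String>,
    emitted: usize,
}

impl Collector {
    // Hypothetical helper that needs to borrow `self` during the loop.
    fn describe(&self) -> String {
        format!("{} emitted so far", self.emitted)
    }

    fn flush(&mut self) {
        // `mem::take` moves the whole Vec out and leaves an empty one behind,
        // so the loop iterates an owned value and `self` stays free to borrow.
        for item in mem::take(&mut self.pending) {
            let note = self.describe(); // fine: no outstanding borrow of `self.pending`
            println!("{item}: {note}");
            self.emitted += 1;
        }
        // A `self.pending.drain(..)` loop would keep `self` mutably borrowed for
        // the whole loop, so the `self.describe()` call would be rejected.
    }
}

fn main() {
    let mut c = Collector { pending: vec!["a".into(), "b".into()], emitted: 0 };
    c.flush();
}
// --- end sketch ---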
@ -1435,7 +1435,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
        let import_suggestions =
            self.lookup_import_candidates(ident, Namespace::MacroNS, parent_scope, is_expected);
        let (span, found_use) = match parent_scope.module.nearest_parent_mod().as_local() {
            Some(def_id) => UsePlacementFinder::check(krate, self.def_id_to_node_id[def_id]),
            Some(def_id) => UsePlacementFinder::check(krate, self.def_id_to_node_id(def_id)),
            None => (None, FoundUse::No),
        };
        show_candidates(

@ -2550,7 +2550,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                .iter()
                .filter_map(|item| {
                    let parent_module = self.opt_local_def_id(item.parent_module)?.to_def_id();
                    Some(StrippedCfgItem { parent_module, name: item.name, cfg: item.cfg.clone() })
                    Some(StrippedCfgItem {
                        parent_module,
                        ident: item.ident,
                        cfg: item.cfg.clone(),
                    })
                })
                .collect::<Vec<_>>();
            local_items.as_slice()

@ -2558,12 +2562,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
            self.tcx.stripped_cfg_items(module.krate)
        };

        for &StrippedCfgItem { parent_module, name, ref cfg } in symbols {
            if parent_module != module || name.name != *segment {
        for &StrippedCfgItem { parent_module, ident, ref cfg } in symbols {
            if parent_module != module || ident.name != *segment {
                continue;
            }

            let note = errors::FoundItemConfigureOut { span: name.span };
            let note = errors::FoundItemConfigureOut { span: ident.span };
            err.subdiagnostic(note);

            if let MetaItemKind::List(nested) = &cfg.kind
@ -296,9 +296,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
    ) -> Option<LexicalScopeBinding<'ra>> {
        assert!(ns == TypeNS || ns == ValueNS);
        let orig_ident = ident;
        if ident.name == kw::Empty {
            return Some(LexicalScopeBinding::Res(Res::Err));
        }
        let (general_span, normalized_span) = if ident.name == kw::SelfUpper {
            // FIXME(jseyfried) improve `Self` hygiene
            let empty_span = ident.span.with_ctxt(SyntaxContext::root());

@ -639,38 +639,38 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
            }

            if let Some(glob_binding) = resolution.shadowed_glob {
                let binding_id = match binding.kind {
                    NameBindingKind::Res(res) => {
                        Some(self.def_id_to_node_id[res.def_id().expect_local()])
                    }
                    NameBindingKind::Module(module) => {
                        Some(self.def_id_to_node_id[module.def_id().expect_local()])
                    }
                    NameBindingKind::Import { import, .. } => import.id(),
                };

                if binding.res() != Res::Err
                    && glob_binding.res() != Res::Err
                    && let NameBindingKind::Import { import: glob_import, .. } =
                        glob_binding.kind
                    && let Some(binding_id) = binding_id
                    && let Some(glob_import_id) = glob_import.id()
                    && let glob_import_def_id = self.local_def_id(glob_import_id)
                    && self.effective_visibilities.is_exported(glob_import_def_id)
                    && glob_binding.vis.is_public()
                    && !binding.vis.is_public()
                {
                    self.lint_buffer.buffer_lint(
                        HIDDEN_GLOB_REEXPORTS,
                        binding_id,
                        binding.span,
                        BuiltinLintDiag::HiddenGlobReexports {
                            name: key.ident.name.to_string(),
                            namespace: key.ns.descr().to_owned(),
                            glob_reexport_span: glob_binding.span,
                            private_item_span: binding.span,
                        },
                    );
                    let binding_id = match binding.kind {
                        NameBindingKind::Res(res) => {
                            Some(self.def_id_to_node_id(res.def_id().expect_local()))
                        }
                        NameBindingKind::Module(module) => {
                            Some(self.def_id_to_node_id(module.def_id().expect_local()))
                        }
                        NameBindingKind::Import { import, .. } => import.id(),
                    };
                    if let Some(binding_id) = binding_id {
                        self.lint_buffer.buffer_lint(
                            HIDDEN_GLOB_REEXPORTS,
                            binding_id,
                            binding.span,
                            BuiltinLintDiag::HiddenGlobReexports {
                                name: key.ident.name.to_string(),
                                namespace: key.ns.descr().to_owned(),
                                glob_reexport_span: glob_binding.span,
                                private_item_span: binding.span,
                            },
                        );
                    }
                }
            }
        }
@ -5007,8 +5007,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
            return false;
        }
        let Some(local_did) = did.as_local() else { return true };
        let Some(node_id) = self.r.def_id_to_node_id.get(local_did) else { return true };
        !self.r.proc_macros.contains(node_id)
        !self.r.proc_macros.contains(&local_did)
    }

    fn resolve_doc_links(&mut self, attrs: &[Attribute], maybe_exported: MaybeExported<'_>) {

@ -2238,7 +2238,7 @@ impl<'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
                .get(&def_id)
                .is_some_and(|sig| sig.has_self),
            None => {
                self.r.tcx.fn_arg_names(def_id).first().is_some_and(|&ident| {
                self.r.tcx.fn_arg_idents(def_id).first().is_some_and(|&ident| {
                    matches!(ident, Some(Ident { name: kw::SelfLower, .. }))
                })
            }
@ -56,7 +56,6 @@ use rustc_hir::def::{
};
use rustc_hir::def_id::{CRATE_DEF_ID, CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalDefIdMap};
use rustc_hir::{PrimTy, TraitCandidate};
use rustc_index::IndexVec;
use rustc_metadata::creader::{CStore, CrateLoader};
use rustc_middle::metadata::ModChild;
use rustc_middle::middle::privacy::EffectiveVisibilities;

@ -1141,7 +1140,7 @@ pub struct Resolver<'ra, 'tcx> {
    ast_transform_scopes: FxHashMap<LocalExpnId, Module<'ra>>,
    unused_macros: FxIndexMap<LocalDefId, (NodeId, Ident)>,
    /// A map from the macro to all its potentially unused arms.
    unused_macro_rules: FxIndexMap<LocalDefId, UnordMap<usize, (Ident, Span)>>,
    unused_macro_rules: FxIndexMap<NodeId, UnordMap<usize, (Ident, Span)>>,
    proc_macro_stubs: FxHashSet<LocalDefId>,
    /// Traces collected during macro resolution and validated when it's complete.
    single_segment_macro_resolutions:

@ -1184,7 +1183,6 @@ pub struct Resolver<'ra, 'tcx> {
    next_node_id: NodeId,

    node_id_to_def_id: NodeMap<Feed<'tcx, LocalDefId>>,
    def_id_to_node_id: IndexVec<LocalDefId, ast::NodeId>,

    /// Indices of unnamed struct or variant fields with unresolved attributes.
    placeholder_field_indices: FxHashMap<NodeId, usize>,

@ -1202,7 +1200,7 @@ pub struct Resolver<'ra, 'tcx> {
    trait_impls: FxIndexMap<DefId, Vec<LocalDefId>>,
    /// A list of proc macro LocalDefIds, written out in the order in which
    /// they are declared in the static array generated by proc_macro_harness.
    proc_macros: Vec<NodeId>,
    proc_macros: Vec<LocalDefId>,
    confused_type_with_std_module: FxIndexMap<Span, Span>,
    /// Whether lifetime elision was successful.
    lifetime_elision_allowed: FxHashSet<NodeId>,

@ -1339,12 +1337,12 @@ impl<'tcx> Resolver<'_, 'tcx> {
        expn_id: ExpnId,
        span: Span,
    ) -> TyCtxtFeed<'tcx, LocalDefId> {
        let data = def_kind.def_path_data(name);
        assert!(
            !self.node_id_to_def_id.contains_key(&node_id),
            "adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
            "adding a def for node-id {:?}, name {:?}, data {:?} but a previous def exists: {:?}",
            node_id,
            data,
            name,
            def_kind,
            self.tcx.definitions_untracked().def_key(self.node_id_to_def_id[&node_id].key()),
        );

@ -1369,7 +1367,6 @@ impl<'tcx> Resolver<'_, 'tcx> {
            debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
            self.node_id_to_def_id.insert(node_id, feed.downgrade());
        }
        assert_eq!(self.def_id_to_node_id.push(node_id), def_id);

        feed
    }

@ -1385,6 +1382,19 @@ impl<'tcx> Resolver<'_, 'tcx> {
    pub fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    /// This function is very slow, as it iterates over the entire
    /// [Resolver::node_id_to_def_id] map just to find the [NodeId]
    /// that corresponds to the given [LocalDefId]. Only use this in
    /// diagnostics code paths.
    fn def_id_to_node_id(&self, def_id: LocalDefId) -> NodeId {
        self.node_id_to_def_id
            .items()
            .filter(|(_, v)| v.key() == def_id)
            .map(|(k, _)| *k)
            .get_only()
            .unwrap()
    }
}

impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
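The new `def_id_to_node_id` helper replaces the dropped `IndexVec` field with a linear scan over the forward `node_id_to_def_id` map, which is why its doc comment restricts it to diagnostics paths. A rough sketch of the same reverse-lookup idea over a plain `HashMap` follows; the `NodeId`/`DefId` newtypes are made up for the example and are not the compiler's types.

// --- illustrative sketch, not part of the diff ---
use std::collections::HashMap;

// Hypothetical stand-ins for the compiler's NodeId / LocalDefId.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct NodeId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct DefId(u32);

// O(n) reverse lookup over the forward `NodeId -> DefId` map.
// Fine for cold diagnostics paths; too slow for anything hot.
fn def_id_to_node_id(forward: &HashMap<NodeId, DefId>, def_id: DefId) -> Option<NodeId> {
    let mut hits = forward.iter().filter(|(_, v)| **v == def_id).map(|(k, _)| *k);
    let first = hits.next();
    // Mirror the `get_only().unwrap()` shape: there must be at most one match.
    debug_assert!(hits.next().is_none(), "DefId mapped from more than one NodeId");
    first
}

fn main() {
    let forward = HashMap::from([(NodeId(1), DefId(10)), (NodeId(2), DefId(20))]);
    assert_eq!(def_id_to_node_id(&forward, DefId(20)), Some(NodeId(2)));
    assert_eq!(def_id_to_node_id(&forward, DefId(99)), None);
}
// --- end sketch ---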
@ -1417,8 +1427,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
            &mut Default::default(),
        );

        let mut def_id_to_node_id = IndexVec::default();
        assert_eq!(def_id_to_node_id.push(CRATE_NODE_ID), CRATE_DEF_ID);
        let mut node_id_to_def_id = NodeMap::default();
        let crate_feed = tcx.create_local_crate_def_id(crate_span);

@ -1553,7 +1561,6 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
            lint_buffer: LintBuffer::default(),
            next_node_id: CRATE_NODE_ID,
            node_id_to_def_id,
            def_id_to_node_id,
            placeholder_field_indices: Default::default(),
            invocation_parents,
            legacy_const_generic_args: Default::default(),

@ -1633,7 +1640,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
    }

    pub fn into_outputs(self) -> ResolverOutputs {
        let proc_macros = self.proc_macros.iter().map(|id| self.local_def_id(*id)).collect();
        let proc_macros = self.proc_macros;
        let expn_that_defined = self.expn_that_defined;
        let extern_crate_map = self.extern_crate_map;
        let maybe_unused_trait_imports = self.maybe_unused_trait_imports;

@ -1648,7 +1655,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
            .filter_map(|item| {
                let parent_module =
                    self.node_id_to_def_id.get(&item.parent_module)?.key().to_def_id();
                Some(StrippedCfgItem { parent_module, name: item.name, cfg: item.cfg })
                Some(StrippedCfgItem { parent_module, ident: item.ident, cfg: item.cfg })
            })
            .collect(),
        );
@ -323,8 +323,7 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
    }

    fn record_macro_rule_usage(&mut self, id: NodeId, rule_i: usize) {
        let did = self.local_def_id(id);
        if let Some(rules) = self.unused_macro_rules.get_mut(&did) {
        if let Some(rules) = self.unused_macro_rules.get_mut(&id) {
            rules.remove(&rule_i);
        }
    }

@ -337,15 +336,12 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
                ident.span,
                BuiltinLintDiag::UnusedMacroDefinition(ident.name),
            );
            // Do not report unused individual rules if the entire macro is unused
            self.unused_macro_rules.swap_remove(&node_id);
        }

        for (&def_id, unused_arms) in self.unused_macro_rules.iter() {
        for (&node_id, unused_arms) in self.unused_macro_rules.iter() {
            for (&arm_i, &(ident, rule_span)) in unused_arms.to_sorted_stable_ord() {
                if self.unused_macros.contains_key(&def_id) {
                    // We already lint the entire macro as unused
                    continue;
                }
                let node_id = self.def_id_to_node_id[def_id];
                self.lint_buffer.buffer_lint(
                    UNUSED_MACRO_RULES,
                    node_id,
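The unused-rule bookkeeping above is now keyed directly by `NodeId`, and whole-macro entries are removed up front so individual arms of an already-unused macro are not reported twice. A self-contained sketch of that pattern with ordinary standard-library maps follows; the names (`UnusedTracker`, `record_rule`, `report`) are illustrative and not the compiler's.

// --- illustrative sketch, not part of the diff ---
use std::collections::{HashMap, HashSet};

type NodeId = u32;

#[derive(Default)]
struct UnusedTracker {
    // Macros that are entirely unused.
    unused_macros: HashSet<NodeId>,
    // Per-macro map of rule index -> rule name, for arms that may be unused.
    unused_macro_rules: HashMap<NodeId, HashMap<usize, String>>,
}

impl UnusedTracker {
    fn record_rule(&mut self, macro_id: NodeId, rule_i: usize, name: &str) {
        self.unused_macro_rules.entry(macro_id).or_default().insert(rule_i, name.to_string());
    }

    fn record_rule_usage(&mut self, macro_id: NodeId, rule_i: usize) {
        if let Some(rules) = self.unused_macro_rules.get_mut(&macro_id) {
            rules.remove(&rule_i);
        }
    }

    fn report(&mut self) {
        // Do not report individual rules of macros that are unused as a whole.
        for id in &self.unused_macros {
            println!("warning: macro {id} is never used");
            self.unused_macro_rules.remove(id);
        }
        for (id, arms) in &self.unused_macro_rules {
            for (arm_i, name) in arms {
                println!("warning: rule #{arm_i} ({name}) of macro {id} is never used");
            }
        }
    }
}

fn main() {
    let mut t = UnusedTracker::default();
    t.record_rule(1, 0, "arm_a");
    t.record_rule(1, 1, "arm_b");
    t.record_rule_usage(1, 0); // arm_a gets used, arm_b stays unused
    t.report();
}
// --- end sketch ---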
|
@ -466,11 +462,11 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
|
|||
}
|
||||
|
||||
fn declare_proc_macro(&mut self, id: NodeId) {
|
||||
self.proc_macros.push(id)
|
||||
self.proc_macros.push(self.local_def_id(id))
|
||||
}
|
||||
|
||||
fn append_stripped_cfg_item(&mut self, parent_node: NodeId, name: Ident, cfg: ast::MetaItem) {
|
||||
self.stripped_cfg_items.push(StrippedCfgItem { parent_module: parent_node, name, cfg });
|
||||
fn append_stripped_cfg_item(&mut self, parent_node: NodeId, ident: Ident, cfg: ast::MetaItem) {
|
||||
self.stripped_cfg_items.push(StrippedCfgItem { parent_module: parent_node, ident, cfg });
|
||||
}
|
||||
|
||||
fn registered_tools(&self) -> &RegisteredTools {
|
||||
|
@ -932,7 +928,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
|
|||
.invocation_parents
|
||||
.get(&parent_scope.expansion)
|
||||
.map_or(ast::CRATE_NODE_ID, |parent| {
|
||||
self.def_id_to_node_id[parent.parent_def]
|
||||
self.def_id_to_node_id(parent.parent_def)
|
||||
});
|
||||
self.lint_buffer.buffer_lint(
|
||||
LEGACY_DERIVE_HELPERS,
|
||||
|
|
|
@ -721,7 +721,8 @@ fn encode_ty_name(tcx: TyCtxt<'_>, def_id: DefId) -> String {
            | hir::definitions::DefPathData::Use
            | hir::definitions::DefPathData::GlobalAsm
            | hir::definitions::DefPathData::MacroNs(..)
            | hir::definitions::DefPathData::LifetimeNs(..) => {
            | hir::definitions::DefPathData::LifetimeNs(..)
            | hir::definitions::DefPathData::AnonAssocTy => {
                bug!("encode_ty_name: unexpected `{:?}`", disambiguated_data.data);
            }
        });
@ -7,6 +7,7 @@ edition = "2024"
# tidy-alphabetical-start
bitflags = "2.4.1"
getopts = "0.2"
rand = "0.9.0"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }