Auto merge of #120500 - oli-obk:intrinsics2.0, r=WaffleLapkin
Implement intrinsics with fallback bodies

Fixes #93145 (though we can port many more intrinsics). cc #63585

The way this works is that the backend logic for generating custom code for intrinsics has been made fallible. The only failure path is "this intrinsic is unknown". The `Instance` (that was `InstanceDef::Intrinsic`) then gets converted to `InstanceDef::Item`, which represents the fallback body. A regular function call to that body is then codegenned.

This is currently implemented for:

* codegen_ssa (so llvm and gcc)
* codegen_cranelift

Other backends will need to adjust, but they can just keep doing what they were doing if they prefer (though adding new intrinsics to the compiler will then require them to implement those intrinsics, instead of getting the fallback body); see the sketch below the commit summary.

cc `@scottmcm` `@WaffleLapkin`

### todo

* [ ] miri support
* [x] default the intrinsic name to the name of the function instead of requiring it to be specified in the attribute
* [x] make sure that the bodies are always available (must be collected for metadata)
Commit: dfa88b328f
49 changed files with 621 additions and 452 deletions
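Before the diff, here is a minimal, standalone sketch of the control flow this change introduces. The `InstanceDef` enum, `codegen_intrinsic_call`, and `codegen_regular_call` below are simplified stand-ins, not the actual rustc types or APIs: the backend hook for intrinsics becomes fallible, and an `Err` is answered by codegenning an ordinary call to the intrinsic's fallback body.

```rust
// Toy model of the fallible intrinsic-codegen contract; not rustc API.
#[derive(Debug)]
enum InstanceDef {
    /// An intrinsic the backend may special-case.
    Intrinsic(&'static str),
    /// The same function, viewed as a regular item: its fallback body.
    Item(&'static str),
}

/// Backend hook: emit custom code, or return the instance to call instead.
fn codegen_intrinsic_call(instance: InstanceDef) -> Result<(), InstanceDef> {
    match instance {
        InstanceDef::Intrinsic(name) if name == "likely" => {
            println!("emitting backend-specific code for `{name}`");
            Ok(())
        }
        // Unknown intrinsic: hand the instance back, re-pointed at its fallback body.
        InstanceDef::Intrinsic(name) => Err(InstanceDef::Item(name)),
        InstanceDef::Item(_) => Ok(()),
    }
}

/// Stand-in for emitting a plain function call to the given instance.
fn codegen_regular_call(instance: &InstanceDef) {
    println!("emitting a regular call to {instance:?}");
}

fn main() {
    for instance in [InstanceDef::Intrinsic("likely"), InstanceDef::Intrinsic("some_new_intrinsic")] {
        // Callers try the intrinsic path first and fall back to the body on `Err`.
        if let Err(fallback) = codegen_intrinsic_call(instance) {
            codegen_regular_call(&fallback);
        }
    }
}
```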
@@ -1666,16 +1666,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         let func_ty = func.ty(body, self.infcx.tcx);
         if let ty::FnDef(def_id, _) = *func_ty.kind() {
-            if self.tcx().is_intrinsic(def_id) {
-                match self.tcx().item_name(def_id) {
-                    sym::simd_shuffle => {
-                        if !matches!(args[2], Spanned { node: Operand::Constant(_), .. }) {
-                            self.tcx()
-                                .dcx()
-                                .emit_err(SimdShuffleLastConst { span: term.source_info.span });
-                        }
-                    }
-                    _ => {}
+            if let Some(sym::simd_shuffle) = self.tcx().intrinsic(def_id) {
+                if !matches!(args[2], Spanned { node: Operand::Constant(_), .. }) {
+                    self.tcx().dcx().emit_err(SimdShuffleLastConst { span: term.source_info.span });
                 }
             }
         }
@@ -387,15 +387,17 @@ pub(crate) fn codegen_terminator_call<'tcx>(
     match instance.def {
         InstanceDef::Intrinsic(_) => {
-            crate::intrinsics::codegen_intrinsic_call(
+            match crate::intrinsics::codegen_intrinsic_call(
                 fx,
                 instance,
                 args,
                 ret_place,
                 target,
                 source_info,
-            );
-            return;
+            ) {
+                Ok(()) => return,
+                Err(instance) => Some(instance),
+            }
         }
         InstanceDef::DropGlue(_, None) => {
             // empty drop glue - a nop.
@@ -268,7 +268,7 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
     destination: CPlace<'tcx>,
     target: Option<BasicBlock>,
     source_info: mir::SourceInfo,
-) {
+) -> Result<(), Instance<'tcx>> {
     let intrinsic = fx.tcx.item_name(instance.def_id());
     let instance_args = instance.args;
@@ -295,8 +295,9 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
             destination,
             target,
             source_info,
-        );
+        )?;
     }
+    Ok(())
 }

 fn codegen_float_intrinsic_call<'tcx>(
@@ -430,25 +431,20 @@ fn codegen_regular_intrinsic_call<'tcx>(
     ret: CPlace<'tcx>,
     destination: Option<BasicBlock>,
     source_info: mir::SourceInfo,
-) {
+) -> Result<(), Instance<'tcx>> {
     assert_eq!(generic_args, instance.args);
     let usize_layout = fx.layout_of(fx.tcx.types.usize);

     match intrinsic {
         sym::abort => {
            fx.bcx.ins().trap(TrapCode::User(0));
-            return;
+            return Ok(());
         }
         sym::likely | sym::unlikely => {
             intrinsic_args!(fx, args => (a); intrinsic);

             ret.write_cvalue(fx, a);
         }
         sym::is_val_statically_known => {
             intrinsic_args!(fx, args => (_a); intrinsic);

             let res = fx.bcx.ins().iconst(types::I8, 0);
             ret.write_cvalue(fx, CValue::by_val(res, ret.layout()));
         }
         sym::breakpoint => {
             intrinsic_args!(fx, args => (); intrinsic);
@@ -697,7 +693,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 })
             });
             crate::base::codegen_panic_nounwind(fx, &msg_str, Some(source_info.span));
-            return;
+            return Ok(());
         }
     }
 }
@@ -792,7 +788,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
             if fx.tcx.is_compiler_builtins(LOCAL_CRATE) {
                 // special case for compiler-builtins to avoid having to patch it
                 crate::trap::trap_unimplemented(fx, "128bit atomics not yet supported");
-                return;
+                return Ok(());
             } else {
                 fx.tcx
                     .dcx()
@@ -802,7 +798,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, ty);
-                    return;
+                    return Ok(());
                 }
             }
             let clif_ty = fx.clif_type(ty).unwrap();
@@ -823,7 +819,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
             if fx.tcx.is_compiler_builtins(LOCAL_CRATE) {
                 // special case for compiler-builtins to avoid having to patch it
                 crate::trap::trap_unimplemented(fx, "128bit atomics not yet supported");
-                return;
+                return Ok(());
             } else {
                 fx.tcx
                     .dcx()
@@ -833,7 +829,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, ty);
-                    return;
+                    return Ok(());
                 }
             }
@@ -850,7 +846,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -872,7 +868,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
@@ -895,7 +891,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -917,7 +913,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -939,7 +935,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -960,7 +956,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -981,7 +977,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -1002,7 +998,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -1023,7 +1019,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -1044,7 +1040,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -1065,7 +1061,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -1086,7 +1082,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
                 ty::Uint(_) | ty::Int(_) | ty::RawPtr(..) => {}
                 _ => {
                     report_atomic_type_validation_error(fx, intrinsic, source_info.span, layout.ty);
-                    return;
+                    return Ok(());
                 }
             }
             let ty = fx.clif_type(layout.ty).unwrap();
@@ -1233,19 +1229,6 @@ fn codegen_regular_intrinsic_call<'tcx>(
             ret.write_cvalue(fx, CValue::by_val(cmp, ret.layout()));
         }

-        sym::const_allocate => {
-            intrinsic_args!(fx, args => (_size, _align); intrinsic);
-
-            // returns a null pointer at runtime.
-            let null = fx.bcx.ins().iconst(fx.pointer_type, 0);
-            ret.write_cvalue(fx, CValue::by_val(null, ret.layout()));
-        }
-
-        sym::const_deallocate => {
-            intrinsic_args!(fx, args => (_ptr, _size, _align); intrinsic);
-            // nop at runtime.
-        }
-
         sym::black_box => {
             intrinsic_args!(fx, args => (a); intrinsic);

@@ -1261,13 +1244,12 @@ fn codegen_regular_intrinsic_call<'tcx>(
             );
         }

-        _ => {
-            fx.tcx
-                .dcx()
-                .span_fatal(source_info.span, format!("unsupported intrinsic {}", intrinsic));
-        }
+        // Unimplemented intrinsics must have a fallback body. The fallback body is obtained
+        // by converting the `InstanceDef::Intrinsic` to an `InstanceDef::Item`.
+        _ => return Err(Instance::new(instance.def_id(), instance.args)),
     }

     let ret_block = fx.get_block(destination.unwrap());
     fx.bcx.ins().jump(ret_block, &[]);
+    Ok(())
 }
@@ -90,7 +90,7 @@ fn get_simple_intrinsic<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, name: Symbol) ->
 }

 impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
-    fn codegen_intrinsic_call(&mut self, instance: Instance<'tcx>, fn_abi: &FnAbi<'tcx, Ty<'tcx>>, args: &[OperandRef<'tcx, RValue<'gcc>>], llresult: RValue<'gcc>, span: Span) {
+    fn codegen_intrinsic_call(&mut self, instance: Instance<'tcx>, fn_abi: &FnAbi<'tcx, Ty<'tcx>>, args: &[OperandRef<'tcx, RValue<'gcc>>], llresult: RValue<'gcc>, span: Span) -> Result<(), Instance<'tcx>> {
         let tcx = self.tcx;
         let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
@@ -137,7 +137,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
                     args[2].immediate(),
                     llresult,
                 );
-                return;
+                return Ok(());
             }
             sym::breakpoint => {
                 unimplemented!();
@@ -166,12 +166,12 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
             sym::volatile_store => {
                 let dst = args[0].deref(self.cx());
                 args[1].val.volatile_store(self, dst);
-                return;
+                return Ok(());
             }
             sym::unaligned_volatile_store => {
                 let dst = args[0].deref(self.cx());
                 args[1].val.unaligned_volatile_store(self, dst);
-                return;
+                return Ok(());
             }
             sym::prefetch_read_data
             | sym::prefetch_write_data
@@ -269,7 +269,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
                     },
                     None => {
                         tcx.dcx().emit_err(InvalidMonomorphization::BasicIntegerType { span, name, ty });
-                        return;
+                        return Ok(());
                     }
                 }
             }
@@ -339,7 +339,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
                 extended_asm.set_volatile_flag(true);

                 // We have copied the value to `result` already.
-                return;
+                return Ok(());
             }

             sym::ptr_mask => {
@@ -357,11 +357,12 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
             _ if name_str.starts_with("simd_") => {
                 match generic_simd_intrinsic(self, name, callee_ty, args, ret_ty, llret_ty, span) {
                     Ok(llval) => llval,
-                    Err(()) => return,
+                    Err(()) => return Ok(()),
                 }
             }

-            _ => bug!("unknown intrinsic '{}'", name),
+            // Fall back to default body
+            _ => return Err(Instance::new(instance.def_id(), instance.args)),
         };

         if !fn_abi.ret.is_ignore() {
@@ -376,6 +377,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
                 .store(self, result);
             }
         }
+        Ok(())
     }

     fn abort(&mut self) {
@@ -86,7 +86,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
         args: &[OperandRef<'tcx, &'ll Value>],
         llresult: &'ll Value,
         span: Span,
-    ) {
+    ) -> Result<(), ty::Instance<'tcx>> {
         let tcx = self.tcx;
         let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
@@ -141,7 +141,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                     args[2].immediate(),
                     llresult,
                 );
-                return;
+                return Ok(());
             }
             sym::breakpoint => self.call_intrinsic("llvm.debugtrap", &[]),
             sym::va_copy => {
@@ -194,17 +194,17 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                 if !result.layout.is_zst() {
                     self.store(load, result.llval, result.align);
                 }
-                return;
+                return Ok(());
             }
             sym::volatile_store => {
                 let dst = args[0].deref(self.cx());
                 args[1].val.volatile_store(self, dst);
-                return;
+                return Ok(());
             }
             sym::unaligned_volatile_store => {
                 let dst = args[0].deref(self.cx());
                 args[1].val.unaligned_volatile_store(self, dst);
-                return;
+                return Ok(());
             }
             sym::prefetch_read_data
             | sym::prefetch_write_data
@@ -305,7 +305,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                         name,
                         ty,
                     });
-                    return;
+                    return Ok(());
                 }
             }
         }
@@ -387,7 +387,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                     .unwrap_or_else(|| bug!("failed to generate inline asm call for `black_box`"));

                 // We have copied the value to `result` already.
-                return;
+                return Ok(());
             }

             _ if name.as_str().starts_with("simd_") => {
@@ -395,11 +395,15 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                     self, name, callee_ty, fn_args, args, ret_ty, llret_ty, span,
                 ) {
                     Ok(llval) => llval,
-                    Err(()) => return,
+                    Err(()) => return Ok(()),
                 }
             }

-            _ => bug!("unknown intrinsic '{}' -- should it have been lowered earlier?", name),
+            _ => {
+                debug!("unknown intrinsic '{}' -- falling back to default body", name);
+                // Call the fallback body instead of generating the intrinsic code
+                return Err(ty::Instance::new(instance.def_id(), instance.args));
+            }
         };

         if !fn_abi.ret.is_ignore() {
@@ -411,6 +415,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                 .store(self, result);
             }
         }
+        Ok(())
     }

     fn abort(&mut self) {
@@ -787,7 +787,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {

         // Handle intrinsics old codegen wants Expr's for, ourselves.
         let intrinsic = match def {
-            Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id)),
+            Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().intrinsic(def_id).unwrap()),
             _ => None,
         };

@ -817,21 +817,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
|
||||
// The arguments we'll be passing. Plus one to account for outptr, if used.
|
||||
let arg_count = fn_abi.args.len() + fn_abi.ret.is_indirect() as usize;
|
||||
let mut llargs = Vec::with_capacity(arg_count);
|
||||
|
||||
// Prepare the return value destination
|
||||
let ret_dest = if target.is_some() {
|
||||
let is_intrinsic = intrinsic.is_some();
|
||||
self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, is_intrinsic)
|
||||
} else {
|
||||
ReturnDest::Nothing
|
||||
};
|
||||
|
||||
if intrinsic == Some(sym::caller_location) {
|
||||
return if let Some(target) = target {
|
||||
let location =
|
||||
self.get_caller_location(bx, mir::SourceInfo { span: fn_span, ..source_info });
|
||||
|
||||
let mut llargs = Vec::with_capacity(arg_count);
|
||||
let ret_dest =
|
||||
self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, true, true);
|
||||
assert_eq!(llargs, []);
|
||||
if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
|
||||
location.val.store(bx, tmp);
|
||||
}
|
||||
|
@ -842,9 +837,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
};
|
||||
}
|
||||
|
||||
match intrinsic {
|
||||
None | Some(sym::drop_in_place) => {}
|
||||
let instance = match intrinsic {
|
||||
None | Some(sym::drop_in_place) => instance,
|
||||
Some(intrinsic) => {
|
||||
let mut llargs = Vec::with_capacity(1);
|
||||
let ret_dest = self.make_return_dest(
|
||||
bx,
|
||||
destination,
|
||||
&fn_abi.ret,
|
||||
&mut llargs,
|
||||
true,
|
||||
target.is_some(),
|
||||
);
|
||||
let dest = match ret_dest {
|
||||
_ if fn_abi.ret.is_indirect() => llargs[0],
|
||||
ReturnDest::Nothing => bx.const_undef(bx.type_ptr()),
|
||||
|
@ -878,27 +882,29 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
})
|
||||
.collect();
|
||||
|
||||
Self::codegen_intrinsic_call(
|
||||
bx,
|
||||
*instance.as_ref().unwrap(),
|
||||
fn_abi,
|
||||
&args,
|
||||
dest,
|
||||
span,
|
||||
);
|
||||
let instance = *instance.as_ref().unwrap();
|
||||
match Self::codegen_intrinsic_call(bx, instance, fn_abi, &args, dest, span) {
|
||||
Ok(()) => {
|
||||
if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
|
||||
self.store_return(bx, ret_dest, &fn_abi.ret, dst.llval);
|
||||
}
|
||||
|
||||
if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
|
||||
self.store_return(bx, ret_dest, &fn_abi.ret, dst.llval);
|
||||
return if let Some(target) = target {
|
||||
helper.funclet_br(self, bx, target, mergeable_succ)
|
||||
} else {
|
||||
bx.unreachable();
|
||||
MergingSucc::False
|
||||
};
|
||||
}
|
||||
Err(instance) => Some(instance),
|
||||
}
|
||||
|
||||
return if let Some(target) = target {
|
||||
helper.funclet_br(self, bx, target, mergeable_succ)
|
||||
} else {
|
||||
bx.unreachable();
|
||||
MergingSucc::False
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let mut llargs = Vec::with_capacity(arg_count);
|
||||
let destination = target.as_ref().map(|&target| {
|
||||
(self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, false, true), target)
|
||||
});
|
||||
|
||||
// Split the rust-call tupled arguments off.
|
||||
let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
|
||||
|
@ -1040,14 +1046,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
(_, Some(llfn)) => llfn,
|
||||
_ => span_bug!(span, "no instance or llfn for call"),
|
||||
};
|
||||
|
||||
helper.do_call(
|
||||
self,
|
||||
bx,
|
||||
fn_abi,
|
||||
fn_ptr,
|
||||
&llargs,
|
||||
target.as_ref().map(|&target| (ret_dest, target)),
|
||||
destination,
|
||||
unwind,
|
||||
&copied_constant_arguments,
|
||||
mergeable_succ,
|
||||
|
@ -1632,7 +1637,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
fn_ret: &ArgAbi<'tcx, Ty<'tcx>>,
|
||||
llargs: &mut Vec<Bx::Value>,
|
||||
is_intrinsic: bool,
|
||||
has_target: bool,
|
||||
) -> ReturnDest<'tcx, Bx::Value> {
|
||||
if !has_target {
|
||||
return ReturnDest::Nothing;
|
||||
}
|
||||
// If the return is ignored, we can just return a do-nothing `ReturnDest`.
|
||||
if fn_ret.is_ignore() {
|
||||
return ReturnDest::Nothing;
|
||||
|
|
|
@@ -54,6 +54,7 @@ fn memset_intrinsic<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 }

 impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
+    /// In the `Err` case, returns the instance that should be called instead.
     pub fn codegen_intrinsic_call(
         bx: &mut Bx,
         instance: ty::Instance<'tcx>,
@@ -61,7 +62,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         args: &[OperandRef<'tcx, Bx::Value>],
         llresult: Bx::Value,
         span: Span,
-    ) {
+    ) -> Result<(), ty::Instance<'tcx>> {
         let callee_ty = instance.ty(bx.tcx(), ty::ParamEnv::reveal_all());

         let ty::FnDef(def_id, fn_args) = *callee_ty.kind() else {
@ -81,7 +82,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
let llval = match name {
|
||||
sym::abort => {
|
||||
bx.abort();
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
sym::va_start => bx.va_start(args[0].immediate()),
|
||||
|
@ -150,7 +151,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
args[0].immediate(),
|
||||
args[2].immediate(),
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
sym::write_bytes => {
|
||||
memset_intrinsic(
|
||||
|
@ -161,7 +162,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
args[1].immediate(),
|
||||
args[2].immediate(),
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
sym::volatile_copy_nonoverlapping_memory => {
|
||||
|
@ -174,7 +175,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
args[1].immediate(),
|
||||
args[2].immediate(),
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
sym::volatile_copy_memory => {
|
||||
copy_intrinsic(
|
||||
|
@ -186,7 +187,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
args[1].immediate(),
|
||||
args[2].immediate(),
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
sym::volatile_set_memory => {
|
||||
memset_intrinsic(
|
||||
|
@ -197,17 +198,17 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
args[1].immediate(),
|
||||
args[2].immediate(),
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
sym::volatile_store => {
|
||||
let dst = args[0].deref(bx.cx());
|
||||
args[1].val.volatile_store(bx, dst);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
sym::unaligned_volatile_store => {
|
||||
let dst = args[0].deref(bx.cx());
|
||||
args[1].val.unaligned_volatile_store(bx, dst);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
sym::exact_div => {
|
||||
let ty = arg_tys[0];
|
||||
|
@ -225,7 +226,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
name,
|
||||
ty,
|
||||
});
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -245,7 +246,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
name,
|
||||
ty: arg_tys[0],
|
||||
});
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -256,14 +257,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
span,
|
||||
ty: arg_tys[0],
|
||||
});
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
let Some((_width, signed)) = int_type_width_signed(ret_ty, bx.tcx()) else {
|
||||
bx.tcx().dcx().emit_err(InvalidMonomorphization::FloatToIntUnchecked {
|
||||
span,
|
||||
ty: ret_ty,
|
||||
});
|
||||
return;
|
||||
return Ok(());
|
||||
};
|
||||
if signed {
|
||||
bx.fptosi(args[0].immediate(), llret_ty)
|
||||
|
@ -280,16 +281,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
}
|
||||
}
|
||||
|
||||
sym::const_allocate => {
|
||||
// returns a null pointer at runtime.
|
||||
bx.const_null(bx.type_ptr())
|
||||
}
|
||||
|
||||
sym::const_deallocate => {
|
||||
// nop at runtime.
|
||||
return;
|
||||
}
|
||||
|
||||
// This requires that atomic intrinsics follow a specific naming pattern:
|
||||
// "atomic_<operation>[_<ordering>]"
|
||||
name if let Some(atomic) = name_str.strip_prefix("atomic_") => {
|
||||
|
@ -350,10 +341,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
bx.store(val, dest.llval, dest.align);
|
||||
let dest = result.project_field(bx, 1);
|
||||
bx.store(success, dest.llval, dest.align);
|
||||
return;
|
||||
} else {
|
||||
return invalid_monomorphization(ty);
|
||||
invalid_monomorphization(ty);
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
"load" => {
|
||||
|
@ -383,7 +374,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
)
|
||||
}
|
||||
} else {
|
||||
return invalid_monomorphization(ty);
|
||||
invalid_monomorphization(ty);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -399,10 +391,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
val = bx.ptrtoint(val, bx.type_isize());
|
||||
}
|
||||
bx.atomic_store(val, ptr, parse_ordering(bx, ordering), size);
|
||||
return;
|
||||
} else {
|
||||
return invalid_monomorphization(ty);
|
||||
invalid_monomorphization(ty);
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
"fence" => {
|
||||
|
@ -410,7 +402,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
parse_ordering(bx, ordering),
|
||||
SynchronizationScope::CrossThread,
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
"singlethreadfence" => {
|
||||
|
@ -418,7 +410,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
parse_ordering(bx, ordering),
|
||||
SynchronizationScope::SingleThread,
|
||||
);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// These are all AtomicRMW ops
|
||||
|
@ -449,7 +441,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
}
|
||||
bx.atomic_rmw(atom_op, ptr, val, parse_ordering(bx, ordering))
|
||||
} else {
|
||||
return invalid_monomorphization(ty);
|
||||
invalid_monomorphization(ty);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -458,7 +451,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
|||
sym::nontemporal_store => {
|
||||
let dst = args[0].deref(bx.cx());
|
||||
args[1].val.nontemporal_store(bx, dst);
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
sym::ptr_guaranteed_cmp => {
|
||||
|
@@ -493,8 +486,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {

             _ => {
                 // Need to use backend-specific things in the implementation.
-                bx.codegen_intrinsic_call(instance, fn_abi, args, llresult, span);
-                return;
+                return bx.codegen_intrinsic_call(instance, fn_abi, args, llresult, span);
             }
         };

@@ -507,6 +499,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 .store(bx, result);
             }
         }
+        Ok(())
     }
 }

@@ -8,6 +8,8 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
     /// Remember to add all intrinsics here, in `compiler/rustc_hir_analysis/src/check/mod.rs`,
     /// and in `library/core/src/intrinsics.rs`; if you need access to any LLVM intrinsics,
     /// add them to `compiler/rustc_codegen_llvm/src/context.rs`.
+    /// Returns `Err` if another instance should be called instead. This is used to invoke
+    /// intrinsic default bodies in case an intrinsic is not implemented by the backend.
     fn codegen_intrinsic_call(
         &mut self,
         instance: ty::Instance<'tcx>,
@@ -15,7 +17,7 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
         args: &[OperandRef<'tcx, Self::Value>],
         llresult: Self::Value,
         span: Span,
-    );
+    ) -> Result<(), ty::Instance<'tcx>>;

     fn abort(&mut self);
     fn assume(&mut self, val: Self::Value);
@@ -49,7 +49,7 @@ fn constness(tcx: TyCtxt<'_>, def_id: LocalDefId) -> hir::Constness {
         hir::Node::ForeignItem(hir::ForeignItem { kind: hir::ForeignItemKind::Fn(..), .. }) => {
             // Intrinsics use `rustc_const_{un,}stable` attributes to indicate constness. All other
             // foreign items cannot be evaluated at compile-time.
-            let is_const = if tcx.is_intrinsic(def_id) {
+            let is_const = if tcx.intrinsic(def_id).is_some() {
                 tcx.lookup_const_stability(def_id).is_some()
             } else {
                 false
@@ -526,7 +526,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {

         match instance.def {
             ty::InstanceDef::Intrinsic(def_id) => {
-                assert!(self.tcx.is_intrinsic(def_id));
+                assert!(self.tcx.intrinsic(def_id).is_some());
                 // FIXME: Should `InPlace` arguments be reset to uninit?
                 M::call_intrinsic(
                     self,
@@ -861,7 +861,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
                 // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
                 // `extern` functions, and these have no way to get marked `const`. So instead we
                 // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
-                if self.ccx.is_const_stable_const_fn() || tcx.is_intrinsic(callee) {
+                if self.ccx.is_const_stable_const_fn() || tcx.intrinsic(callee).is_some() {
                     self.check_op(ops::FnCallUnstable(callee, None));
                     return;
                 }
@@ -788,6 +788,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
         rustc_safe_intrinsic, Normal, template!(Word), WarnFollowing,
         "the `#[rustc_safe_intrinsic]` attribute is used internally to mark intrinsics as safe"
     ),
+    rustc_attr!(
+        rustc_intrinsic, Normal, template!(Word), ErrorFollowing,
+        "the `#[rustc_intrinsic]` attribute is used to declare intrinsics with function bodies",
+    ),

     // ==========================================================================
     // Internal attributes, Testing:
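For context on the `#[rustc_intrinsic]` attribute registered above: it lets an intrinsic be declared as an ordinary function whose body serves as the fallback. A hypothetical declaration might look like the sketch below (the attribute is compiler-internal, and the function name and body here are made up for illustration; real declarations live in `library/core/src/intrinsics.rs` and may carry additional attributes):

```rust
#![feature(rustc_attrs)]

// Hypothetical intrinsic with a fallback body: a backend that recognizes it can emit
// specialized code, and any backend that does not simply codegens a call to this body.
#[rustc_intrinsic]
pub fn saturating_increment(x: u32) -> u32 {
    x.saturating_add(1)
}
```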
@@ -525,7 +525,18 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) {
         DefKind::Enum => {
             check_enum(tcx, def_id);
         }
-        DefKind::Fn => {} // entirely within check_item_body
+        DefKind::Fn => {
+            if let Some(name) = tcx.intrinsic(def_id) {
+                intrinsic::check_intrinsic_type(
+                    tcx,
+                    def_id,
+                    tcx.def_ident_span(def_id).unwrap(),
+                    name,
+                    Abi::Rust,
+                )
+            }
+            // Everything else is checked entirely within check_item_body
+        }
         DefKind::Impl { of_trait } => {
             if of_trait && let Some(impl_trait_header) = tcx.impl_trait_header(def_id) {
                 check_impl_items_against_trait(
@ -590,15 +601,24 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) {
|
|||
match abi {
|
||||
Abi::RustIntrinsic => {
|
||||
for item in items {
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
intrinsic::check_intrinsic_type(tcx, item);
|
||||
intrinsic::check_intrinsic_type(
|
||||
tcx,
|
||||
item.id.owner_id.def_id,
|
||||
item.span,
|
||||
item.ident.name,
|
||||
abi,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Abi::PlatformIntrinsic => {
|
||||
for item in items {
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
intrinsic::check_platform_intrinsic_type(tcx, item);
|
||||
intrinsic::check_platform_intrinsic_type(
|
||||
tcx,
|
||||
item.id.owner_id.def_id,
|
||||
item.span,
|
||||
item.ident.name,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -7,30 +7,36 @@ use crate::errors::{
|
|||
WrongNumberOfGenericArgumentsToIntrinsic,
|
||||
};
|
||||
|
||||
use hir::def_id::DefId;
|
||||
use rustc_errors::{codes::*, struct_span_code_err, DiagnosticMessage};
|
||||
use rustc_hir as hir;
|
||||
use rustc_middle::traits::{ObligationCause, ObligationCauseCode};
|
||||
use rustc_middle::ty::{self, Ty, TyCtxt};
|
||||
use rustc_span::def_id::LocalDefId;
|
||||
use rustc_span::symbol::{kw, sym};
|
||||
use rustc_span::{Span, Symbol};
|
||||
use rustc_target::spec::abi::Abi;
|
||||
|
||||
fn equate_intrinsic_type<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
it: &hir::ForeignItem<'_>,
|
||||
span: Span,
|
||||
def_id: LocalDefId,
|
||||
n_tps: usize,
|
||||
n_lts: usize,
|
||||
n_cts: usize,
|
||||
sig: ty::PolyFnSig<'tcx>,
|
||||
) {
|
||||
let (own_counts, span) = match &it.kind {
|
||||
hir::ForeignItemKind::Fn(.., generics) => {
|
||||
let own_counts = tcx.generics_of(it.owner_id.to_def_id()).own_counts();
|
||||
let (own_counts, span) = match tcx.hir_node_by_def_id(def_id) {
|
||||
hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. })
|
||||
| hir::Node::ForeignItem(hir::ForeignItem {
|
||||
kind: hir::ForeignItemKind::Fn(.., generics),
|
||||
..
|
||||
}) => {
|
||||
let own_counts = tcx.generics_of(def_id).own_counts();
|
||||
(own_counts, generics.span)
|
||||
}
|
||||
_ => {
|
||||
struct_span_code_err!(tcx.dcx(), it.span, E0622, "intrinsic must be a function")
|
||||
.with_span_label(it.span, "expected a function")
|
||||
struct_span_code_err!(tcx.dcx(), span, E0622, "intrinsic must be a function")
|
||||
.with_span_label(span, "expected a function")
|
||||
.emit();
|
||||
return;
|
||||
}
|
||||
|
@ -54,23 +60,26 @@ fn equate_intrinsic_type<'tcx>(
|
|||
&& gen_count_ok(own_counts.types, n_tps, "type")
|
||||
&& gen_count_ok(own_counts.consts, n_cts, "const")
|
||||
{
|
||||
let it_def_id = it.owner_id.def_id;
|
||||
let _ = check_function_signature(
|
||||
tcx,
|
||||
ObligationCause::new(it.span, it_def_id, ObligationCauseCode::IntrinsicType),
|
||||
it_def_id.into(),
|
||||
ObligationCause::new(span, def_id, ObligationCauseCode::IntrinsicType),
|
||||
def_id.into(),
|
||||
sig,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the unsafety of the given intrinsic.
|
||||
pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: DefId) -> hir::Unsafety {
|
||||
let has_safe_attr = match tcx.has_attr(intrinsic_id, sym::rustc_safe_intrinsic) {
|
||||
true => hir::Unsafety::Normal,
|
||||
false => hir::Unsafety::Unsafe,
|
||||
pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: LocalDefId) -> hir::Unsafety {
|
||||
let has_safe_attr = if tcx.has_attr(intrinsic_id, sym::rustc_intrinsic) {
|
||||
tcx.fn_sig(intrinsic_id).skip_binder().unsafety()
|
||||
} else {
|
||||
match tcx.has_attr(intrinsic_id, sym::rustc_safe_intrinsic) {
|
||||
true => hir::Unsafety::Normal,
|
||||
false => hir::Unsafety::Unsafe,
|
||||
}
|
||||
};
|
||||
let is_in_list = match tcx.item_name(intrinsic_id) {
|
||||
let is_in_list = match tcx.item_name(intrinsic_id.into()) {
|
||||
// When adding a new intrinsic to this list,
|
||||
// it's usually worth updating that intrinsic's documentation
|
||||
// to note that it's safe to call, since
|
||||
|
@ -112,6 +121,7 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: DefId) -> hir
|
|||
| sym::forget
|
||||
| sym::black_box
|
||||
| sym::variant_count
|
||||
| sym::is_val_statically_known
|
||||
| sym::ptr_mask
|
||||
| sym::debug_assertions => hir::Unsafety::Normal,
|
||||
_ => hir::Unsafety::Unsafe,
|
||||
|
@ -122,7 +132,7 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: DefId) -> hir
|
|||
tcx.def_span(intrinsic_id),
|
||||
DiagnosticMessage::from(format!(
|
||||
"intrinsic safety mismatch between list of intrinsics within the compiler and core library intrinsics for intrinsic `{}`",
|
||||
tcx.item_name(intrinsic_id)
|
||||
tcx.item_name(intrinsic_id.into())
|
||||
)
|
||||
)).emit();
|
||||
}
|
||||
|
@ -132,8 +142,14 @@ pub fn intrinsic_operation_unsafety(tcx: TyCtxt<'_>, intrinsic_id: DefId) -> hir
|
|||
|
||||
/// Remember to add all intrinsics here, in `compiler/rustc_codegen_llvm/src/intrinsic.rs`,
|
||||
/// and in `library/core/src/intrinsics.rs`.
|
||||
pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
||||
let generics = tcx.generics_of(it.owner_id);
|
||||
pub fn check_intrinsic_type(
|
||||
tcx: TyCtxt<'_>,
|
||||
intrinsic_id: LocalDefId,
|
||||
span: Span,
|
||||
intrinsic_name: Symbol,
|
||||
abi: Abi,
|
||||
) {
|
||||
let generics = tcx.generics_of(intrinsic_id);
|
||||
let param = |n| {
|
||||
if let Some(&ty::GenericParamDef {
|
||||
name, kind: ty::GenericParamDefKind::Type { .. }, ..
|
||||
|
@ -141,11 +157,9 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
{
|
||||
Ty::new_param(tcx, n, name)
|
||||
} else {
|
||||
Ty::new_error_with_message(tcx, tcx.def_span(it.owner_id), "expected param")
|
||||
Ty::new_error_with_message(tcx, span, "expected param")
|
||||
}
|
||||
};
|
||||
let intrinsic_id = it.owner_id.to_def_id();
|
||||
let intrinsic_name = tcx.item_name(intrinsic_id);
|
||||
let name_str = intrinsic_name.as_str();
|
||||
|
||||
let bound_vars = tcx.mk_bound_variable_kinds(&[
|
||||
|
@ -169,7 +183,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
})
|
||||
};
|
||||
|
||||
let (n_tps, n_lts, inputs, output, unsafety) = if name_str.starts_with("atomic_") {
|
||||
let (n_tps, n_lts, n_cts, inputs, output, unsafety) = if name_str.starts_with("atomic_") {
|
||||
let split: Vec<&str> = name_str.split('_').collect();
|
||||
assert!(split.len() >= 2, "Atomic intrinsic in an incorrect format");
|
||||
|
||||
|
@ -187,49 +201,51 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
| "umin" => (1, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], param(0)),
|
||||
"fence" | "singlethreadfence" => (0, Vec::new(), Ty::new_unit(tcx)),
|
||||
op => {
|
||||
tcx.dcx().emit_err(UnrecognizedAtomicOperation { span: it.span, op });
|
||||
tcx.dcx().emit_err(UnrecognizedAtomicOperation { span, op });
|
||||
return;
|
||||
}
|
||||
};
|
||||
(n_tps, 0, inputs, output, hir::Unsafety::Unsafe)
|
||||
(n_tps, 0, 0, inputs, output, hir::Unsafety::Unsafe)
|
||||
} else {
|
||||
let unsafety = intrinsic_operation_unsafety(tcx, intrinsic_id);
|
||||
let (n_tps, inputs, output) = match intrinsic_name {
|
||||
sym::abort => (0, Vec::new(), tcx.types.never),
|
||||
sym::unreachable => (0, Vec::new(), tcx.types.never),
|
||||
sym::breakpoint => (0, Vec::new(), Ty::new_unit(tcx)),
|
||||
let (n_tps, n_cts, inputs, output) = match intrinsic_name {
|
||||
sym::abort => (0, 0, vec![], tcx.types.never),
|
||||
sym::unreachable => (0, 0, vec![], tcx.types.never),
|
||||
sym::breakpoint => (0, 0, vec![], Ty::new_unit(tcx)),
|
||||
sym::size_of | sym::pref_align_of | sym::min_align_of | sym::variant_count => {
|
||||
(1, Vec::new(), tcx.types.usize)
|
||||
(1, 0, vec![], tcx.types.usize)
|
||||
}
|
||||
sym::size_of_val | sym::min_align_of_val => {
|
||||
(1, vec![Ty::new_imm_ptr(tcx, param(0))], tcx.types.usize)
|
||||
(1, 0, vec![Ty::new_imm_ptr(tcx, param(0))], tcx.types.usize)
|
||||
}
|
||||
sym::rustc_peek => (1, vec![param(0)], param(0)),
|
||||
sym::caller_location => (0, vec![], tcx.caller_location_ty()),
|
||||
sym::rustc_peek => (1, 0, vec![param(0)], param(0)),
|
||||
sym::caller_location => (0, 0, vec![], tcx.caller_location_ty()),
|
||||
sym::assert_inhabited
|
||||
| sym::assert_zero_valid
|
||||
| sym::assert_mem_uninitialized_valid => (1, Vec::new(), Ty::new_unit(tcx)),
|
||||
sym::forget => (1, vec![param(0)], Ty::new_unit(tcx)),
|
||||
sym::transmute | sym::transmute_unchecked => (2, vec![param(0)], param(1)),
|
||||
| sym::assert_mem_uninitialized_valid => (1, 0, vec![], Ty::new_unit(tcx)),
|
||||
sym::forget => (1, 0, vec![param(0)], Ty::new_unit(tcx)),
|
||||
sym::transmute | sym::transmute_unchecked => (2, 0, vec![param(0)], param(1)),
|
||||
sym::prefetch_read_data
|
||||
| sym::prefetch_write_data
|
||||
| sym::prefetch_read_instruction
|
||||
| sym::prefetch_write_instruction => (
|
||||
1,
|
||||
0,
|
||||
vec![
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Not }),
|
||||
tcx.types.i32,
|
||||
],
|
||||
Ty::new_unit(tcx),
|
||||
),
|
||||
sym::drop_in_place => (1, vec![Ty::new_mut_ptr(tcx, param(0))], Ty::new_unit(tcx)),
|
||||
sym::needs_drop => (1, Vec::new(), tcx.types.bool),
|
||||
sym::drop_in_place => (1, 0, vec![Ty::new_mut_ptr(tcx, param(0))], Ty::new_unit(tcx)),
|
||||
sym::needs_drop => (1, 0, vec![], tcx.types.bool),
|
||||
|
||||
sym::type_name => (1, Vec::new(), Ty::new_static_str(tcx)),
|
||||
sym::type_id => (1, Vec::new(), tcx.types.u128),
|
||||
sym::offset => (2, vec![param(0), param(1)], param(0)),
|
||||
sym::type_name => (1, 0, vec![], Ty::new_static_str(tcx)),
|
||||
sym::type_id => (1, 0, vec![], tcx.types.u128),
|
||||
sym::offset => (2, 0, vec![param(0), param(1)], param(0)),
|
||||
sym::arith_offset => (
|
||||
1,
|
||||
0,
|
||||
vec![
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Not }),
|
||||
tcx.types.isize,
|
||||
|
@ -238,6 +254,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
),
|
||||
sym::ptr_mask => (
|
||||
1,
|
||||
0,
|
||||
vec![
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Not }),
|
||||
tcx.types.usize,
|
||||
|
@ -247,6 +264,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
|
||||
sym::copy | sym::copy_nonoverlapping => (
|
||||
1,
|
||||
0,
|
||||
vec![
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Not }),
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Mut }),
|
||||
|
@ -256,6 +274,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
),
|
||||
sym::volatile_copy_memory | sym::volatile_copy_nonoverlapping_memory => (
|
||||
1,
|
||||
0,
|
||||
vec![
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Mut }),
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Not }),
|
||||
|
@ -265,10 +284,11 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
),
|
||||
sym::compare_bytes => {
|
||||
let byte_ptr = Ty::new_imm_ptr(tcx, tcx.types.u8);
|
||||
(0, vec![byte_ptr, byte_ptr, tcx.types.usize], tcx.types.i32)
|
||||
(0, 0, vec![byte_ptr, byte_ptr, tcx.types.usize], tcx.types.i32)
|
||||
}
|
||||
sym::write_bytes | sym::volatile_set_memory => (
|
||||
1,
|
||||
0,
|
||||
vec![
|
||||
Ty::new_ptr(tcx, ty::TypeAndMut { ty: param(0), mutbl: hir::Mutability::Mut }),
|
||||
tcx.types.u8,
|
||||
|
@ -276,56 +296,56 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
],
|
||||
Ty::new_unit(tcx),
|
||||
),
|
||||
sym::sqrtf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::sqrtf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::powif32 => (0, vec![tcx.types.f32, tcx.types.i32], tcx.types.f32),
|
||||
sym::powif64 => (0, vec![tcx.types.f64, tcx.types.i32], tcx.types.f64),
|
||||
sym::sinf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::sinf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::cosf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::cosf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::powf32 => (0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::powf64 => (0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::expf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::expf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::exp2f32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::exp2f64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::logf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::logf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::log10f32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::log10f64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::log2f32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::log2f64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::fmaf32 => (0, vec![tcx.types.f32, tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::fmaf64 => (0, vec![tcx.types.f64, tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::fabsf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::fabsf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::minnumf32 => (0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::minnumf64 => (0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::maxnumf32 => (0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::maxnumf64 => (0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::copysignf32 => (0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::copysignf64 => (0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::floorf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::floorf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::ceilf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::ceilf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::truncf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::truncf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::rintf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::rintf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::nearbyintf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::nearbyintf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::roundf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::roundf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::roundevenf32 => (0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::roundevenf64 => (0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::sqrtf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::sqrtf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::powif32 => (0, 0, vec![tcx.types.f32, tcx.types.i32], tcx.types.f32),
|
||||
sym::powif64 => (0, 0, vec![tcx.types.f64, tcx.types.i32], tcx.types.f64),
|
||||
sym::sinf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::sinf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::cosf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::cosf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::powf32 => (0, 0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::powf64 => (0, 0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::expf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::expf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::exp2f32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::exp2f64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::logf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::logf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::log10f32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::log10f64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::log2f32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::log2f64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::fmaf32 => (0, 0, vec![tcx.types.f32, tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::fmaf64 => (0, 0, vec![tcx.types.f64, tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::fabsf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::fabsf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::minnumf32 => (0, 0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::minnumf64 => (0, 0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::maxnumf32 => (0, 0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::maxnumf64 => (0, 0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::copysignf32 => (0, 0, vec![tcx.types.f32, tcx.types.f32], tcx.types.f32),
|
||||
sym::copysignf64 => (0, 0, vec![tcx.types.f64, tcx.types.f64], tcx.types.f64),
|
||||
sym::floorf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::floorf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::ceilf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::ceilf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::truncf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::truncf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::rintf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::rintf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::nearbyintf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::nearbyintf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::roundf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::roundf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
sym::roundevenf32 => (0, 0, vec![tcx.types.f32], tcx.types.f32),
|
||||
sym::roundevenf64 => (0, 0, vec![tcx.types.f64], tcx.types.f64),
|
||||
|
||||
sym::volatile_load | sym::unaligned_volatile_load => {
|
||||
(1, vec![Ty::new_imm_ptr(tcx, param(0))], param(0))
|
||||
(1, 0, vec![Ty::new_imm_ptr(tcx, param(0))], param(0))
|
||||
}
|
||||
sym::volatile_store | sym::unaligned_volatile_store => {
|
||||
(1, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], Ty::new_unit(tcx))
|
||||
(1, 0, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], Ty::new_unit(tcx))
|
||||
}
|
||||
|
||||
sym::ctpop
|
||||
|
@ -334,62 +354,66 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
|
|||
| sym::cttz
|
||||
| sym::cttz_nonzero
|
||||
| sym::bswap
|
||||
| sym::bitreverse => (1, vec![param(0)], param(0)),
|
||||
| sym::bitreverse => (1, 0, vec![param(0)], param(0)),
|
||||
|
||||
sym::add_with_overflow | sym::sub_with_overflow | sym::mul_with_overflow => {
|
||||
(1, vec![param(0), param(0)], Ty::new_tup(tcx, &[param(0), tcx.types.bool]))
|
||||
(1, 0, vec![param(0), param(0)], Ty::new_tup(tcx, &[param(0), tcx.types.bool]))
|
||||
}
|
||||
|
||||
sym::ptr_guaranteed_cmp => (
|
||||
1,
|
||||
0,
|
||||
vec![Ty::new_imm_ptr(tcx, param(0)), Ty::new_imm_ptr(tcx, param(0))],
|
||||
tcx.types.u8,
|
||||
),
|
||||
|
||||
sym::const_allocate => {
|
||||
(0, vec![tcx.types.usize, tcx.types.usize], Ty::new_mut_ptr(tcx, tcx.types.u8))
|
||||
(0, 1, vec![tcx.types.usize, tcx.types.usize], Ty::new_mut_ptr(tcx, tcx.types.u8))
|
||||
}
|
||||
sym::const_deallocate => (
|
||||
0,
|
||||
1,
|
||||
vec![Ty::new_mut_ptr(tcx, tcx.types.u8), tcx.types.usize, tcx.types.usize],
|
||||
Ty::new_unit(tcx),
|
||||
),
|
||||
|
||||
sym::ptr_offset_from => (
|
||||
1,
|
||||
0,
|
||||
vec![Ty::new_imm_ptr(tcx, param(0)), Ty::new_imm_ptr(tcx, param(0))],
|
||||
tcx.types.isize,
|
||||
),
|
||||
sym::ptr_offset_from_unsigned => (
|
||||
1,
|
||||
0,
|
||||
vec![Ty::new_imm_ptr(tcx, param(0)), Ty::new_imm_ptr(tcx, param(0))],
|
||||
tcx.types.usize,
|
||||
),
|
||||
sym::unchecked_div | sym::unchecked_rem | sym::exact_div => {
|
||||
(1, vec![param(0), param(0)], param(0))
|
||||
(1, 0, vec![param(0), param(0)], param(0))
|
||||
}
|
||||
sym::unchecked_shl | sym::unchecked_shr | sym::rotate_left | sym::rotate_right => {
|
||||
(1, vec![param(0), param(0)], param(0))
|
||||
(1, 0, vec![param(0), param(0)], param(0))
|
||||
}
|
||||
sym::unchecked_add | sym::unchecked_sub | sym::unchecked_mul => {
|
||||
(1, vec![param(0), param(0)], param(0))
|
||||
(1, 0, vec![param(0), param(0)], param(0))
|
||||
}
|
||||
sym::wrapping_add | sym::wrapping_sub | sym::wrapping_mul => {
|
||||
(1, vec![param(0), param(0)], param(0))
|
||||
(1, 0, vec![param(0), param(0)], param(0))
|
||||
}
|
||||
sym::saturating_add | sym::saturating_sub => (1, vec![param(0), param(0)], param(0)),
|
||||
sym::saturating_add | sym::saturating_sub => (1, 0, vec![param(0), param(0)], param(0)),
|
||||
sym::fadd_fast | sym::fsub_fast | sym::fmul_fast | sym::fdiv_fast | sym::frem_fast => {
|
||||
(1, vec![param(0), param(0)], param(0))
|
||||
(1, 0, vec![param(0), param(0)], param(0))
|
||||
}
|
||||
sym::float_to_int_unchecked => (2, vec![param(0)], param(1)),
|
||||
sym::float_to_int_unchecked => (2, 0, vec![param(0)], param(1)),
|
||||
|
||||
sym::assume => (0, vec![tcx.types.bool], Ty::new_unit(tcx)),
|
||||
sym::likely => (0, vec![tcx.types.bool], tcx.types.bool),
|
||||
sym::unlikely => (0, vec![tcx.types.bool], tcx.types.bool),
|
||||
sym::assume => (0, 0, vec![tcx.types.bool], Ty::new_unit(tcx)),
sym::likely => (0, 0, vec![tcx.types.bool], tcx.types.bool),
sym::unlikely => (0, 0, vec![tcx.types.bool], tcx.types.bool),

sym::read_via_copy => (1, vec![Ty::new_imm_ptr(tcx, param(0))], param(0)),
sym::read_via_copy => (1, 0, vec![Ty::new_imm_ptr(tcx, param(0))], param(0)),
sym::write_via_move => {
(1, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], Ty::new_unit(tcx))
(1, 0, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], Ty::new_unit(tcx))
}

sym::discriminant_value => {

@@ -401,6 +425,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
let br = ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon };
(
1,
0,
vec![Ty::new_imm_ref(
tcx,
ty::Region::new_bound(tcx, ty::INNERMOST, br),
@@ -427,6 +452,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
Abi::Rust,
));
(
0,
0,
vec![Ty::new_fn_ptr(tcx, try_fn_ty), mut_u8, Ty::new_fn_ptr(tcx, catch_fn_ty)],
tcx.types.i32,
@@ -434,61 +460,66 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
}

sym::va_start | sym::va_end => match mk_va_list_ty(hir::Mutability::Mut) {
Some((va_list_ref_ty, _)) => (0, vec![va_list_ref_ty], Ty::new_unit(tcx)),
Some((va_list_ref_ty, _)) => (0, 0, vec![va_list_ref_ty], Ty::new_unit(tcx)),
None => bug!("`va_list` language item needed for C-variadic intrinsics"),
},

sym::va_copy => match mk_va_list_ty(hir::Mutability::Not) {
Some((va_list_ref_ty, va_list_ty)) => {
let va_list_ptr_ty = Ty::new_mut_ptr(tcx, va_list_ty);
(0, vec![va_list_ptr_ty, va_list_ref_ty], Ty::new_unit(tcx))
(0, 0, vec![va_list_ptr_ty, va_list_ref_ty], Ty::new_unit(tcx))
}
None => bug!("`va_list` language item needed for C-variadic intrinsics"),
},

sym::va_arg => match mk_va_list_ty(hir::Mutability::Mut) {
Some((va_list_ref_ty, _)) => (1, vec![va_list_ref_ty], param(0)),
Some((va_list_ref_ty, _)) => (1, 0, vec![va_list_ref_ty], param(0)),
None => bug!("`va_list` language item needed for C-variadic intrinsics"),
},

sym::nontemporal_store => {
(1, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], Ty::new_unit(tcx))
(1, 0, vec![Ty::new_mut_ptr(tcx, param(0)), param(0)], Ty::new_unit(tcx))
}

sym::raw_eq => {
let br = ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon };
let param_ty =
Ty::new_imm_ref(tcx, ty::Region::new_bound(tcx, ty::INNERMOST, br), param(0));
(1, vec![param_ty; 2], tcx.types.bool)
(1, 0, vec![param_ty; 2], tcx.types.bool)
}

sym::black_box => (1, vec![param(0)], param(0)),
sym::black_box => (1, 0, vec![param(0)], param(0)),

sym::is_val_statically_known => (1, vec![param(0)], tcx.types.bool),
sym::is_val_statically_known => (1, 1, vec![param(0)], tcx.types.bool),

sym::const_eval_select => (4, vec![param(0), param(1), param(2)], param(3)),
sym::const_eval_select => (4, 0, vec![param(0), param(1), param(2)], param(3)),

sym::vtable_size | sym::vtable_align => {
(0, vec![Ty::new_imm_ptr(tcx, Ty::new_unit(tcx))], tcx.types.usize)
(0, 0, vec![Ty::new_imm_ptr(tcx, Ty::new_unit(tcx))], tcx.types.usize)
}

sym::debug_assertions => (0, Vec::new(), tcx.types.bool),
sym::debug_assertions => (0, 1, Vec::new(), tcx.types.bool),

other => {
tcx.dcx().emit_err(UnrecognizedIntrinsicFunction { span: it.span, name: other });
tcx.dcx().emit_err(UnrecognizedIntrinsicFunction { span, name: other });
return;
}
};
(n_tps, 0, inputs, output, unsafety)
(n_tps, 0, n_cts, inputs, output, unsafety)
};
let sig = tcx.mk_fn_sig(inputs, output, false, unsafety, Abi::RustIntrinsic);
let sig = tcx.mk_fn_sig(inputs, output, false, unsafety, abi);
let sig = ty::Binder::bind_with_vars(sig, bound_vars);
equate_intrinsic_type(tcx, it, n_tps, n_lts, 0, sig)
equate_intrinsic_type(tcx, span, intrinsic_id, n_tps, n_lts, n_cts, sig)
}

/// Type-check `extern "platform-intrinsic" { ... }` functions.
pub fn check_platform_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
let generics = tcx.generics_of(it.owner_id);
pub fn check_platform_intrinsic_type(
tcx: TyCtxt<'_>,
intrinsic_id: LocalDefId,
span: Span,
name: Symbol,
) {
let generics = tcx.generics_of(intrinsic_id);
let param = |n| {
if let Some(&ty::GenericParamDef {
name, kind: ty::GenericParamDefKind::Type { .. }, ..
@@ -496,12 +527,10 @@ pub fn check_platform_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>)
{
Ty::new_param(tcx, n, name)
} else {
Ty::new_error_with_message(tcx, tcx.def_span(it.owner_id), "expected param")
Ty::new_error_with_message(tcx, span, "expected param")
}
};

let name = it.ident.name;

let (n_tps, n_cts, inputs, output) = match name {
sym::simd_eq | sym::simd_ne | sym::simd_lt | sym::simd_le | sym::simd_gt | sym::simd_ge => {
(2, 0, vec![param(0), param(0)], param(1))
@@ -574,12 +603,12 @@ pub fn check_platform_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>)
sym::simd_shuffle_generic => (2, 1, vec![param(0), param(0)], param(1)),
_ => {
let msg = format!("unrecognized platform-specific intrinsic function: `{name}`");
tcx.dcx().span_err(it.span, msg);
tcx.dcx().span_err(span, msg);
return;
}
};

let sig = tcx.mk_fn_sig(inputs, output, false, hir::Unsafety::Unsafe, Abi::PlatformIntrinsic);
let sig = ty::Binder::dummy(sig);
equate_intrinsic_type(tcx, it, n_tps, 0, n_cts, sig)
equate_intrinsic_type(tcx, span, intrinsic_id, n_tps, 0, n_cts, sig)
}

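For readers following the signature-table changes above: each intrinsic's entry gains a second count, threaded through to `equate_intrinsic_type` as `n_cts` (judging by the variable name, a count of const generic parameters) alongside the existing type-parameter count. The following standalone sketch uses toy types and made-up names (`ExpectedSig`, `expected_sig`, `check`) rather than rustc APIs, and only illustrates the lookup-then-check pattern; its two sample entries mirror `raw_eq => (1, 0, ...)` and `is_val_statically_known => (1, 1, ...)` from the diff.

// Sketch only: table shape (n_tps, n_cts, inputs, output), not compiler code.
struct ExpectedSig {
    n_tps: usize,              // expected type parameters
    n_cts: usize,              // expected const parameters (the new count)
    inputs: Vec<&'static str>, // input types, plain strings for the sketch
    output: &'static str,
}

fn expected_sig(name: &str) -> Option<ExpectedSig> {
    Some(match name {
        "raw_eq" => ExpectedSig { n_tps: 1, n_cts: 0, inputs: vec!["&T", "&T"], output: "bool" },
        "is_val_statically_known" => {
            ExpectedSig { n_tps: 1, n_cts: 1, inputs: vec!["T"], output: "bool" }
        }
        _ => return None, // unrecognized intrinsic: the real code emits an error here
    })
}

fn check(name: &str, declared_tps: usize, declared_cts: usize) -> Result<(), String> {
    let sig = expected_sig(name).ok_or_else(|| format!("unrecognized intrinsic `{name}`"))?;
    if (declared_tps, declared_cts) != (sig.n_tps, sig.n_cts) {
        return Err(format!(
            "`{name}` expects {} type and {} const parameters",
            sig.n_tps, sig.n_cts
        ));
    }
    Ok(())
}

fn main() {
    let sig = expected_sig("raw_eq").unwrap();
    println!("raw_eq: {:?} -> {}", sig.inputs, sig.output);
    assert!(check("raw_eq", 1, 0).is_ok());
    assert!(check("is_val_statically_known", 1, 0).is_err()); // wrong const-parameter count
}
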
@@ -1651,7 +1651,7 @@ fn compute_sig_of_foreign_fn_decl<'tcx>(
abi: abi::Abi,
) -> ty::PolyFnSig<'tcx> {
let unsafety = if abi == abi::Abi::RustIntrinsic {
intrinsic_operation_unsafety(tcx, def_id.to_def_id())
intrinsic_operation_unsafety(tcx, def_id)
} else {
hir::Unsafety::Unsafe
};

@@ -540,8 +540,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

if let Some(def_id) = def_id
&& self.tcx.def_kind(def_id) == hir::def::DefKind::Fn
&& self.tcx.is_intrinsic(def_id)
&& self.tcx.item_name(def_id) == sym::const_eval_select
&& matches!(self.tcx.intrinsic(def_id), Some(sym::const_eval_select))
{
let fn_sig = self.resolve_vars_if_possible(fn_sig);
for idx in 0..=1 {

@@ -867,7 +867,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
let a_sig = a.fn_sig(self.tcx);
if let ty::FnDef(def_id, _) = *a.kind() {
// Intrinsics are not coercible to function pointers
if self.tcx.is_intrinsic(def_id) {
if self.tcx.intrinsic(def_id).is_some() {
return Err(TypeError::IntrinsicCast);
}

@@ -316,7 +316,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {

if !self.same_type_modulo_infer(*found_sig, *expected_sig)
|| !sig.is_suggestable(self.tcx, true)
|| self.tcx.is_intrinsic(*did)
|| self.tcx.intrinsic(*did).is_some()
{
return;
}
@@ -348,8 +348,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
if !self.same_type_modulo_infer(*found_sig, *expected_sig)
|| !found_sig.is_suggestable(self.tcx, true)
|| !expected_sig.is_suggestable(self.tcx, true)
|| self.tcx.is_intrinsic(*did1)
|| self.tcx.is_intrinsic(*did2)
|| self.tcx.intrinsic(*did1).is_some()
|| self.tcx.intrinsic(*did2).is_some()
{
return;
}

@@ -1227,7 +1227,7 @@ impl<'tcx> LateLintPass<'tcx> for MutableTransmutes {
}

fn def_id_is_transmute(cx: &LateContext<'_>, def_id: DefId) -> bool {
cx.tcx.is_intrinsic(def_id) && cx.tcx.item_name(def_id) == sym::transmute
matches!(cx.tcx.intrinsic(def_id), Some(sym::transmute))
}
}
}

@@ -1749,8 +1749,8 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
self.root.tables.attr_flags.get(self, index)
}

fn get_is_intrinsic(self, index: DefIndex) -> bool {
self.root.tables.is_intrinsic.get(self, index)
fn get_intrinsic(self, index: DefIndex) -> Option<Symbol> {
self.root.tables.intrinsic.get(self, index).map(|d| d.decode(self))
}

fn get_doc_link_resolutions(self, index: DefIndex) -> DocLinkResMap {

@@ -356,7 +356,7 @@ provide! { tcx, def_id, other, cdata,
cdata.get_stability_implications(tcx).iter().copied().collect()
}
stripped_cfg_items => { cdata.get_stripped_cfg_items(cdata.cnum, tcx) }
is_intrinsic => { cdata.get_is_intrinsic(def_id.index) }
intrinsic => { cdata.get_intrinsic(def_id.index) }
defined_lang_items => { cdata.get_lang_items(tcx) }
diagnostic_items => { cdata.get_diagnostic_items() }
missing_lang_items => { cdata.get_missing_lang_items(tcx) }

@@ -1409,7 +1409,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
if let DefKind::Fn | DefKind::AssocFn = def_kind {
self.tables.asyncness.set_some(def_id.index, tcx.asyncness(def_id));
record_array!(self.tables.fn_arg_names[def_id] <- tcx.fn_arg_names(def_id));
self.tables.is_intrinsic.set(def_id.index, tcx.is_intrinsic(def_id));
if let Some(name) = tcx.intrinsic(def_id) {
record!(self.tables.intrinsic[def_id] <- name);
}
}
if let DefKind::TyParam = def_kind {
let default = self.tcx.object_lifetime_default(def_id);

@@ -375,7 +375,7 @@ macro_rules! define_tables {

define_tables! {
- defaulted:
is_intrinsic: Table<DefIndex, bool>,
intrinsic: Table<DefIndex, Option<LazyValue<Symbol>>>,
is_macro_rules: Table<DefIndex, bool>,
is_type_alias_impl_trait: Table<DefIndex, bool>,
type_alias_is_lazy: Table<DefIndex, bool>,

@@ -241,6 +241,7 @@ trivial! {
Option<rustc_target::abi::FieldIdx>,
Option<rustc_target::spec::PanicStrategy>,
Option<usize>,
Option<rustc_span::Symbol>,
Result<(), rustc_errors::ErrorGuaranteed>,
Result<(), rustc_middle::traits::query::NoSolution>,
Result<rustc_middle::traits::EvaluationResult, rustc_middle::traits::OverflowError>,

@@ -1760,8 +1760,8 @@ rustc_queries! {
separate_provide_extern
}
/// Whether the function is an intrinsic
query is_intrinsic(def_id: DefId) -> bool {
desc { |tcx| "checking whether `{}` is an intrinsic", tcx.def_path_str(def_id) }
query intrinsic(def_id: DefId) -> Option<Symbol> {
desc { |tcx| "fetch intrinsic name if `{}` is an intrinsic", tcx.def_path_str(def_id) }
separate_provide_extern
}
/// Returns the lang items defined in another crate by loading it from metadata.

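The hunk above replaces the boolean `is_intrinsic` query with an `intrinsic` query that returns the intrinsic's name as `Option<Symbol>`. A rough standalone sketch of why that shape is convenient at call sites, in plain Rust with a `HashMap` standing in for the query system (`Ctx`, `is_transmute`, and the numeric def-ids are invented for the example): a "is this the `transmute` intrinsic?" test becomes a single `matches!`, which is the pattern several call sites in this commit switch to.

use std::collections::HashMap;

struct Ctx {
    // stand-in for the query/metadata machinery: def-id -> intrinsic name
    intrinsics: HashMap<u32, &'static str>,
}

impl Ctx {
    // mirrors the new `tcx.intrinsic(def_id) -> Option<Symbol>` shape
    fn intrinsic(&self, def_id: u32) -> Option<&'static str> {
        self.intrinsics.get(&def_id).copied()
    }
}

// mirrors `matches!(cx.tcx.intrinsic(def_id), Some(sym::transmute))` from the lint hunk
fn is_transmute(cx: &Ctx, def_id: u32) -> bool {
    matches!(cx.intrinsic(def_id), Some("transmute"))
}

fn main() {
    let cx = Ctx { intrinsics: HashMap::from([(1_u32, "transmute"), (2, "abort")]) };
    assert!(is_transmute(&cx, 1));
    assert!(!is_transmute(&cx, 2));
    assert!(!is_transmute(&cx, 99)); // not an intrinsic at all
}
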
@@ -18,7 +18,7 @@ use rustc_hir::def_id::{CrateNum, DefId, LocalDefId};
use rustc_index::bit_set::GrowableBitSet;
use rustc_macros::HashStable;
use rustc_session::Limit;
use rustc_span::sym;
use rustc_span::{sym, Symbol};
use rustc_target::abi::{Integer, IntegerType, Primitive, Size};
use rustc_target::spec::abi::Abi;
use smallvec::SmallVec;
@@ -1552,9 +1552,15 @@ pub fn is_doc_notable_trait(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
.any(|items| items.iter().any(|item| item.has_name(sym::notable_trait)))
}

/// Determines whether an item is an intrinsic by Abi.
pub fn is_intrinsic(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
matches!(tcx.fn_sig(def_id).skip_binder().abi(), Abi::RustIntrinsic | Abi::PlatformIntrinsic)
/// Determines whether an item is an intrinsic by Abi or by whether it has a `rustc_intrinsic` attribute.
pub fn intrinsic(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<Symbol> {
if matches!(tcx.fn_sig(def_id).skip_binder().abi(), Abi::RustIntrinsic | Abi::PlatformIntrinsic)
|| tcx.has_attr(def_id, sym::rustc_intrinsic)
{
Some(tcx.item_name(def_id.into()))
} else {
None
}
}

pub fn provide(providers: &mut Providers) {
@@ -1562,7 +1568,7 @@ pub fn provide(providers: &mut Providers) {
reveal_opaque_types_in_bounds,
is_doc_hidden,
is_doc_notable_trait,
is_intrinsic,
intrinsic,
..*providers
}
}

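The `intrinsic` helper above encodes the new rule: an item is treated as an intrinsic either because of its ABI or because it carries the `rustc_intrinsic` attribute, and in both cases the query yields the item's name. A self-contained sketch of that classification follows; `Abi`, `Item`, and the sample names are stand-ins for this illustration, not rustc's types.

#[derive(Clone, Copy)]
enum Abi {
    Rust,
    RustIntrinsic,
    PlatformIntrinsic,
}

struct Item {
    name: &'static str,
    abi: Abi,
    has_rustc_intrinsic_attr: bool,
}

// An item counts as an intrinsic if its ABI says so, or if it carries
// `#[rustc_intrinsic]`; either way the caller gets the name back.
fn intrinsic(item: &Item) -> Option<&'static str> {
    if matches!(item.abi, Abi::RustIntrinsic | Abi::PlatformIntrinsic)
        || item.has_rustc_intrinsic_attr
    {
        Some(item.name)
    } else {
        None
    }
}

fn main() {
    let by_abi = Item { name: "transmute", abi: Abi::RustIntrinsic, has_rustc_intrinsic_attr: false };
    let by_attr = Item { name: "is_val_statically_known", abi: Abi::Rust, has_rustc_intrinsic_attr: true };
    let plain = Item { name: "len", abi: Abi::Rust, has_rustc_intrinsic_attr: false };
    assert_eq!(intrinsic(&by_abi), Some("transmute"));
    assert_eq!(intrinsic(&by_attr), Some("is_val_statically_known"));
    assert_eq!(intrinsic(&plain), None);
}
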
@@ -202,8 +202,7 @@ impl PeekCall {
&terminator.kind
{
if let ty::FnDef(def_id, fn_args) = *func.const_.ty().kind() {
let name = tcx.item_name(def_id);
if !tcx.is_intrinsic(def_id) || name != sym::rustc_peek {
if tcx.intrinsic(def_id)? != sym::rustc_peek {
return None;
}

@@ -70,7 +70,7 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
TerminatorKind::Call { func: Operand::Constant(ref f), unwind, .. } => {
let fn_ty = self.instantiate_ty(f.const_.ty());
self.cost += if let ty::FnDef(def_id, _) = *fn_ty.kind()
&& tcx.is_intrinsic(def_id)
&& tcx.intrinsic(def_id).is_some()
{
// Don't give intrinsics the extra penalty for calls
INSTR_COST

@@ -289,9 +289,9 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
if args.is_empty() {
return;
}
let ty = args.type_at(0);

let known_is_valid = intrinsic_assert_panics(self.tcx, self.param_env, ty, intrinsic_name);
let known_is_valid =
intrinsic_assert_panics(self.tcx, self.param_env, args[0], intrinsic_name);
match known_is_valid {
// We don't know the layout or it's not validity assertion at all, don't touch it
None => {}
@@ -310,10 +310,11 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
fn intrinsic_assert_panics<'tcx>(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
arg: ty::GenericArg<'tcx>,
intrinsic_name: Symbol,
) -> Option<bool> {
let requirement = ValidityRequirement::from_intrinsic(intrinsic_name)?;
let ty = arg.expect_ty();
Some(!tcx.check_validity_requirement((requirement, param_env.and(ty))).ok()?)
}

@@ -322,9 +323,8 @@ fn resolve_rust_intrinsic<'tcx>(
func_ty: Ty<'tcx>,
) -> Option<(Symbol, GenericArgsRef<'tcx>)> {
if let ty::FnDef(def_id, args) = *func_ty.kind() {
if tcx.is_intrinsic(def_id) {
return Some((tcx.item_name(def_id), args));
}
let name = tcx.intrinsic(def_id)?;
return Some((name, args));
}
None
}

@@ -161,8 +161,7 @@ fn remap_mir_for_const_eval_select<'tcx>(
fn_span,
..
} if let ty::FnDef(def_id, _) = *const_.ty().kind()
&& tcx.item_name(def_id) == sym::const_eval_select
&& tcx.is_intrinsic(def_id) =>
&& matches!(tcx.intrinsic(def_id), Some(sym::const_eval_select)) =>
{
let [tupled_args, called_in_const, called_at_rt]: [_; 3] =
std::mem::take(args).try_into().unwrap();

@@ -14,9 +14,8 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
if let TerminatorKind::Call { func, args, destination, target, .. } =
&mut terminator.kind
&& let ty::FnDef(def_id, generic_args) = *func.ty(local_decls, tcx).kind()
&& tcx.is_intrinsic(def_id)
&& let Some(intrinsic_name) = tcx.intrinsic(def_id)
{
let intrinsic_name = tcx.item_name(def_id);
match intrinsic_name {
sym::unreachable => {
terminator.kind = TerminatorKind::Unreachable;

@@ -956,19 +956,24 @@ fn visit_instance_use<'tcx>(
if !should_codegen_locally(tcx, &instance) {
return;
}

// The intrinsics assert_inhabited, assert_zero_valid, and assert_mem_uninitialized_valid will
// be lowered in codegen to nothing or a call to panic_nounwind. So if we encounter any
// of those intrinsics, we need to include a mono item for panic_nounwind, else we may try to
// codegen a call to that function without generating code for the function itself.
if let ty::InstanceDef::Intrinsic(def_id) = instance.def {
let name = tcx.item_name(def_id);
if let Some(_requirement) = ValidityRequirement::from_intrinsic(name) {
// The intrinsics assert_inhabited, assert_zero_valid, and assert_mem_uninitialized_valid will
// be lowered in codegen to nothing or a call to panic_nounwind. So if we encounter any
// of those intrinsics, we need to include a mono item for panic_nounwind, else we may try to
// codegen a call to that function without generating code for the function itself.
let def_id = tcx.lang_items().get(LangItem::PanicNounwind).unwrap();
let panic_instance = Instance::mono(tcx, def_id);
if should_codegen_locally(tcx, &panic_instance) {
output.push(create_fn_mono_item(tcx, panic_instance, source));
}
} else if tcx.has_attr(def_id, sym::rustc_intrinsic) {
// Codegen the fallback body of intrinsics with fallback bodies
let instance = ty::Instance::new(def_id, instance.args);
if should_codegen_locally(tcx, &instance) {
output.push(create_fn_mono_item(tcx, instance, source));
}
}
}

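The collector hunk above adds two special cases when an intrinsic instance is used: the validity-assertion intrinsics pull in a mono item for `panic_nounwind`, and intrinsics carrying `rustc_intrinsic` get their fallback body collected, so the plain call emitted by a backend that does not special-case them has something to link against. A toy sketch of that decision follows; the `Extra` enum and function name are made up for the illustration, and plain strings stand in for rustc's types.

#[derive(Debug, PartialEq)]
enum Extra {
    None,
    PanicNounwind,              // mono item for the panic helper the lowering may call
    FallbackBody(&'static str), // mono item for the intrinsic's own body
}

fn extra_mono_item(name: &'static str, has_rustc_intrinsic_attr: bool) -> Extra {
    // the three validity-assertion intrinsics named in the comment above
    let is_validity_assert = matches!(
        name,
        "assert_inhabited" | "assert_zero_valid" | "assert_mem_uninitialized_valid"
    );
    if is_validity_assert {
        Extra::PanicNounwind
    } else if has_rustc_intrinsic_attr {
        Extra::FallbackBody(name)
    } else {
        Extra::None
    }
}

fn main() {
    assert_eq!(extra_mono_item("assert_inhabited", false), Extra::PanicNounwind);
    assert_eq!(
        extra_mono_item("is_val_statically_known", true),
        Extra::FallbackBody("is_val_statically_known")
    );
    assert_eq!(extra_mono_item("transmute", false), Extra::None);
}
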
@@ -1422,6 +1422,7 @@ symbols! {
rustc_if_this_changed,
rustc_inherit_overflow_checks,
rustc_insignificant_dtor,
rustc_intrinsic,
rustc_layout,
rustc_layout_scalar_valid_range_end,
rustc_layout_scalar_valid_range_start,

@@ -28,7 +28,8 @@ fn resolve_instance<'tcx>(
tcx.normalize_erasing_regions(param_env, args),
)
} else {
let def = if matches!(tcx.def_kind(def_id), DefKind::Fn) && tcx.is_intrinsic(def_id) {
let def = if matches!(tcx.def_kind(def_id), DefKind::Fn) && tcx.intrinsic(def_id).is_some()
{
debug!(" => intrinsic");
ty::InstanceDef::Intrinsic(def_id)
} else if Some(def_id) == tcx.lang_items().drop_in_place_fn() {

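Finally, instance resolution above keys off the same query: a `Fn` item that the `intrinsic` query recognizes becomes an `InstanceDef::Intrinsic`, and everything else keeps its usual resolution. A minimal standalone sketch of that branch, with a toy `InstanceDef` and integer def-ids rather than the real types:

#[derive(Debug, PartialEq)]
enum InstanceDef {
    Intrinsic(u32),
    Item(u32),
}

fn resolve(def_id: u32, is_fn: bool, intrinsic_name: Option<&str>) -> InstanceDef {
    if is_fn && intrinsic_name.is_some() {
        InstanceDef::Intrinsic(def_id) // handled by the backend, or via its fallback body
    } else {
        InstanceDef::Item(def_id) // ordinary function call
    }
}

fn main() {
    assert_eq!(resolve(7, true, Some("transmute")), InstanceDef::Intrinsic(7));
    assert_eq!(resolve(8, true, None), InstanceDef::Item(8));
}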