rust/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs

//! See docs in `build/expr/mod.rs`.
use rustc_index::vec::Idx;
use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, Builder};
use crate::thir::*;
use rustc_middle::middle::region;
use rustc_middle::mir::AssertKind;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, UpvarSubsts};
use rustc_span::Span;
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Returns an rvalue suitable for use until the end of the current
/// scope expression.
///
/// The operand returned from this function will *not be valid* after
/// an ExprKind::Scope is passed, so please do *not* return it from
/// functions to avoid bad miscompiles.
crate fn as_local_rvalue<M>(&mut self, block: BasicBlock, expr: M) -> BlockAnd<Rvalue<'tcx>>
where
M: Mirror<'tcx, Output = Expr<'tcx>>,
{
let local_scope = self.local_scope();
self.as_rvalue(block, local_scope, expr)
}
2015-08-18 17:59:21 -04:00
/// Compile `expr`, yielding an rvalue.
fn as_rvalue<M>(
&mut self,
block: BasicBlock,
scope: Option<region::Scope>,
expr: M,
) -> BlockAnd<Rvalue<'tcx>>
where
M: Mirror<'tcx, Output = Expr<'tcx>>,
{
let expr = self.hir.mirror(expr);
self.expr_as_rvalue(block, scope, expr)
}
fn expr_as_rvalue(
&mut self,
mut block: BasicBlock,
scope: Option<region::Scope>,
expr: Expr<'tcx>,
) -> BlockAnd<Rvalue<'tcx>> {
debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr);
let this = self;
let expr_span = expr.span;
let source_info = this.source_info(expr_span);
match expr.kind {
ExprKind::ThreadLocalRef(did) => block.and(Rvalue::ThreadLocalRef(did)),
ExprKind::Scope { region_scope, lint_level, value } => {
let region_scope = (region_scope, source_info);
this.in_scope(region_scope, lint_level, |this| this.as_rvalue(block, scope, value))
}
ExprKind::Repeat { value, count } => {
let value_operand = unpack!(block = this.as_operand(block, scope, value));
block.and(Rvalue::Repeat(value_operand, count))
}
ExprKind::Binary { op, lhs, rhs } => {
let lhs = unpack!(block = this.as_operand(block, scope, lhs));
let rhs = unpack!(block = this.as_operand(block, scope, rhs));
this.build_binary_op(block, op, expr_span, expr.ty, lhs, rhs)
}
ExprKind::Unary { op, arg } => {
let arg = unpack!(block = this.as_operand(block, scope, arg));
// Check for -MIN on signed integers
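// Negating the minimum value of a signed integer overflows: e.g. for
// `i8`, `-(i8::MIN)` would be `128`, which is not representable. So,
// before the actual negation, assert that the operand is not `MIN`.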
if this.hir.check_overflow() && op == UnOp::Neg && expr.ty.is_signed() {
let bool_ty = this.hir.bool_ty();
let minval = this.minval_literal(expr_span, expr.ty);
let is_min = this.temp(bool_ty, expr_span);
this.cfg.push_assign(
block,
source_info,
is_min,
Rvalue::BinaryOp(BinOp::Eq, arg.to_copy(), minval),
);
block = this.assert(
block,
Operand::Move(is_min),
false,
AssertKind::OverflowNeg(arg.to_copy()),
expr_span,
);
}
block.and(Rvalue::UnaryOp(op, arg))
}
ExprKind::Box { value } => {
let value = this.hir.mirror(value);
// The `Box<T>` temporary created here is not a part of the HIR,
// and therefore is not considered during generator auto-trait
// (OIBIT) determination. See the comment about `box` at `yield_in_scope`.
let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span).internal());
this.cfg.push(
block,
Statement { source_info, kind: StatementKind::StorageLive(result) },
);
if let Some(scope) = scope {
// schedule a shallow free of that memory, lest we unwind:
this.schedule_drop_storage_and_value(expr_span, scope, result);
}
// malloc some memory of suitable type (thus far, uninitialized):
let box_ = Rvalue::NullaryOp(NullOp::Box, value.ty);
this.cfg.push_assign(block, source_info, Place::from(result), box_);
// initialize the box contents:
unpack!(
block =
this.into(this.hir.tcx().mk_place_deref(Place::from(result)), block, value)
);
block.and(Rvalue::Use(Operand::Move(Place::from(result))))
}
ExprKind::Cast { source } => {
let source = unpack!(block = this.as_operand(block, scope, source));
block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty))
}
ExprKind::Pointer { cast, source } => {
let source = unpack!(block = this.as_operand(block, scope, source));
block.and(Rvalue::Cast(CastKind::Pointer(cast), source, expr.ty))
}
ExprKind::Array { fields } => {
// (*) We would (maybe) be closer to codegen if we
// handled this and other aggregate cases via
// `into()`, not `as_rvalue` -- in that case, instead
// of generating
//
// let tmp1 = ...1;
// let tmp2 = ...2;
// dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])
//
// we could just generate
//
// dest.f = ...1;
// dest.g = ...2;
//
// The problem is that then we would need to:
//
// (a) have a more complex mechanism for handling
// partial cleanup;
// (b) distinguish the case where the type `Foo` has a
// destructor, in which case creating an instance
// as a whole "arms" the destructor, and you can't
// write individual fields; and,
// (c) handle the case where the type Foo has no
// fields. We don't want `let x: ();` to compile
// to the same MIR as `let x = ();`.
// first process the set of fields
let el_ty = expr.ty.sequence_element_type(this.hir.tcx());
let fields: Vec<_> = fields
.into_iter()
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
block.and(Rvalue::Aggregate(box AggregateKind::Array(el_ty), fields))
}
ExprKind::Tuple { fields } => {
// see (*) above
// first process the set of fields
let fields: Vec<_> = fields
.into_iter()
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
block.and(Rvalue::Aggregate(box AggregateKind::Tuple, fields))
}
ExprKind::Closure { closure_id, substs, upvars, movability } => {
// see (*) above
let operands: Vec<_> = upvars
.into_iter()
.map(|upvar| {
let upvar = this.hir.mirror(upvar);
match Category::of(&upvar.kind) {
// Use as_place to avoid creating a temporary when
// moving a variable into a closure, so that
// borrowck knows which variables to mark as being
// used as mut. This is OK here because the upvar
// expressions have no side effects and act on
// disjoint places.
// This occurs when capturing by copy or move; by-reference
// captures go through `as_operand` instead.
Some(Category::Place) => {
let place = unpack!(block = this.as_place(block, upvar));
this.consume_by_copy_or_move(place)
}
_ => {
// Turn mutable borrow captures into unique
// borrow captures when capturing an immutable
// variable. This is sound because the mutation
// that caused the capture will cause an error.
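// For example, in `let x = 0; let c = || x += 1;` the closure
// captures `x` by mutable borrow even though `x` is not declared
// `mut`. The capture is downgraded to a unique borrow here, and the
// `x += 1` itself is what gets reported as an error.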
match upvar.kind {
ExprKind::Borrow {
borrow_kind:
BorrowKind::Mut { allow_two_phase_borrow: false },
arg,
} => unpack!(
block = this.limit_capture_mutability(
upvar.span, upvar.ty, scope, block, arg,
)
),
_ => unpack!(block = this.as_operand(block, scope, upvar)),
}
}
}
})
.collect();
let result = match substs {
UpvarSubsts::Generator(substs) => {
// We implicitly set the discriminant to 0. See
// librustc_mir/transform/deaggregator.rs for details.
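// (Discriminant 0 corresponds to the generator's initial,
// not-yet-resumed state.)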
let movability = movability.unwrap();
box AggregateKind::Generator(closure_id, substs, movability)
}
UpvarSubsts::Closure(substs) => box AggregateKind::Closure(closure_id, substs),
};
block.and(Rvalue::Aggregate(result, operands))
}
ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
block = unpack!(this.stmt_expr(block, expr, None));
block.and(Rvalue::Use(Operand::Constant(box Constant {
span: expr_span,
user_ty: None,
literal: ty::Const::zero_sized(this.hir.tcx(), this.hir.tcx().types.unit),
})))
}
ExprKind::Yield { .. }
| ExprKind::Literal { .. }
| ExprKind::ConstBlock { .. }
| ExprKind::StaticRef { .. }
| ExprKind::Block { .. }
| ExprKind::Match { .. }
| ExprKind::NeverToAny { .. }
| ExprKind::Use { .. }
| ExprKind::Borrow { .. }
| ExprKind::AddressOf { .. }
| ExprKind::Adt { .. }
| ExprKind::Loop { .. }
| ExprKind::LogicalOp { .. }
| ExprKind::Call { .. }
| ExprKind::Field { .. }
| ExprKind::Deref { .. }
| ExprKind::Index { .. }
| ExprKind::VarRef { .. }
| ExprKind::SelfRef
| ExprKind::Break { .. }
| ExprKind::Continue { .. }
| ExprKind::Return { .. }
| ExprKind::InlineAsm { .. }
| ExprKind::LlvmInlineAsm { .. }
| ExprKind::PlaceTypeAscription { .. }
| ExprKind::ValueTypeAscription { .. } => {
// these do not have corresponding `Rvalue` variants,
// so make an operand and then return that
debug_assert!(!matches!(
    Category::of(&expr.kind),
    Some(Category::Rvalue(RvalueFunc::AsRvalue))
));
let operand = unpack!(block = this.as_operand(block, scope, expr));
block.and(Rvalue::Use(operand))
}
}
}
crate fn build_binary_op(
&mut self,
mut block: BasicBlock,
op: BinOp,
span: Span,
ty: Ty<'tcx>,
lhs: Operand<'tcx>,
rhs: Operand<'tcx>,
) -> BlockAnd<Rvalue<'tcx>> {
let source_info = self.source_info(span);
let bool_ty = self.hir.bool_ty();
if self.hir.check_overflow() && op.is_checkable() && ty.is_integral() {
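// A checked binary op yields a `(ty, bool)` pair: the wrapping result
// plus an overflow flag. Assign the pair to a temporary, assert that
// the flag is false, and project out the value; roughly:
//
//     _t = CheckedBinaryOp(op, lhs, rhs);
//     assert(!_t.1);  // panic on overflow
//     _t.0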
let result_tup = self.hir.tcx().intern_tup(&[ty, bool_ty]);
let result_value = self.temp(result_tup, span);
self.cfg.push_assign(
block,
source_info,
result_value,
Rvalue::CheckedBinaryOp(op, lhs.to_copy(), rhs.to_copy()),
);
let val_fld = Field::new(0);
let of_fld = Field::new(1);
let tcx = self.hir.tcx();
let val = tcx.mk_place_field(result_value, val_fld, ty);
let of = tcx.mk_place_field(result_value, of_fld, bool_ty);
let err = AssertKind::Overflow(op, lhs, rhs);
block = self.assert(block, Operand::Move(of), false, err, span);
block.and(Rvalue::Use(Operand::Move(val)))
} else {
if ty.is_integral() && (op == BinOp::Div || op == BinOp::Rem) {
// Checking division and remainder is more complex, since (1) we always
// check, and (2) there are two possible failure cases: divide-by-zero and overflow.
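// The emitted checks are roughly:
//
//     assert(rhs != 0);                        // division/remainder by zero
//     assert(!((rhs == -1) & (lhs == MIN)));   // overflow, signed types only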
let zero_err = if op == BinOp::Div {
AssertKind::DivisionByZero(lhs.to_copy())
} else {
AssertKind::RemainderByZero(lhs.to_copy())
};
let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());
// Check for / 0
let is_zero = self.temp(bool_ty, span);
let zero = self.zero_literal(span, ty);
self.cfg.push_assign(
block,
source_info,
is_zero,
Rvalue::BinaryOp(BinOp::Eq, rhs.to_copy(), zero),
);
block = self.assert(block, Operand::Move(is_zero), false, zero_err, span);
// We only need to check for the overflow in one case:
// MIN / -1, and only for signed values.
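// E.g. `i8::MIN / -1` would be `128`, which does not fit in `i8`.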
if ty.is_signed() {
let neg_1 = self.neg_1_literal(span, ty);
let min = self.minval_literal(span, ty);
let is_neg_1 = self.temp(bool_ty, span);
let is_min = self.temp(bool_ty, span);
let of = self.temp(bool_ty, span);
// This computes `(rhs == -1) & (lhs == MIN)`; it could short-circuit instead.
self.cfg.push_assign(
block,
source_info,
is_neg_1,
Rvalue::BinaryOp(BinOp::Eq, rhs.to_copy(), neg_1),
);
self.cfg.push_assign(
block,
source_info,
is_min,
Rvalue::BinaryOp(BinOp::Eq, lhs.to_copy(), min),
);
let is_neg_1 = Operand::Move(is_neg_1);
let is_min = Operand::Move(is_min);
self.cfg.push_assign(
block,
source_info,
of,
Rvalue::BinaryOp(BinOp::BitAnd, is_neg_1, is_min),
);
block = self.assert(block, Operand::Move(of), false, overflow_err, span);
}
}
block.and(Rvalue::BinaryOp(op, lhs, rhs))
}
}
fn limit_capture_mutability(
&mut self,
upvar_span: Span,
upvar_ty: Ty<'tcx>,
temp_lifetime: Option<region::Scope>,
mut block: BasicBlock,
arg: ExprRef<'tcx>,
) -> BlockAnd<Operand<'tcx>> {
let this = self;
let source_info = this.source_info(upvar_span);
let temp = this.local_decls.push(LocalDecl::new(upvar_ty, upvar_span));
this.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) });
let arg_place = unpack!(block = this.as_place(block, arg));
let mutability = match arg_place.as_ref() {
PlaceRef { local, projection: &[] } => this.local_decls[local].mutability,
PlaceRef { local, projection: &[ProjectionElem::Deref] } => {
debug_assert!(
this.local_decls[local].is_ref_for_guard(),
"Unexpected capture place",
);
this.local_decls[local].mutability
}
PlaceRef {
local,
projection: &[ref proj_base @ .., ProjectionElem::Field(upvar_index, _)],
}
| PlaceRef {
local,
projection:
&[ref proj_base @ .., ProjectionElem::Field(upvar_index, _), ProjectionElem::Deref],
} => {
let place = PlaceRef { local, projection: proj_base };
// The base of the field projection must be the closure's implicit `self` (local `_1`).
debug_assert!(
match place.local_or_deref_local() {
Some(local) => local == Local::new(1),
None => false,
},
"Unexpected capture place"
);
// The field index must refer to one of the closure's recorded
// upvars (`upvar_mutbls` is empty outside a closure).
debug_assert!(
this.upvar_mutbls.len() > upvar_index.index(),
"Unexpected capture place"
);
this.upvar_mutbls[upvar_index.index()]
}
_ => bug!("Unexpected capture place"),
};
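// A unique borrow gives the closure exclusive access to the place
// without requiring the place itself to be declared mutable.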
let borrow_kind = match mutability {
Mutability::Not => BorrowKind::Unique,
Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
};
this.cfg.push_assign(
block,
source_info,
Place::from(temp),
Rvalue::Ref(this.hir.tcx().lifetimes.re_erased, borrow_kind, arg_place),
);
// In constants, temp_lifetime is None. We should not need to drop
// anything because no values with a destructor can be created in
// a constant at this time, even if the type may need dropping.
if let Some(temp_lifetime) = temp_lifetime {
this.schedule_drop_storage_and_value(upvar_span, temp_lifetime, temp);
}
block.and(Operand::Move(Place::from(temp)))
}
// Helper to get a `-1` value of the appropriate type
fn neg_1_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
let param_ty = ty::ParamEnv::empty().and(ty);
let bits = self.hir.tcx().layout_of(param_ty).unwrap().size.bits();
let n = (!0u128) >> (128 - bits);
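// E.g. for `i8` (`bits == 8`): `!0u128 >> 120 == 0xff`, the
// two's-complement bit pattern of `-1` at that width.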
let literal = ty::Const::from_bits(self.hir.tcx(), n, param_ty);
self.literal_operand(span, literal)
}
// Helper to get the minimum value of the appropriate type
fn minval_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
assert!(ty.is_signed());
let param_ty = ty::ParamEnv::empty().and(ty);
let bits = self.hir.tcx().layout_of(param_ty).unwrap().size.bits();
let n = 1 << (bits - 1);
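// E.g. for `i8` (`bits == 8`): `1 << 7 == 0x80`, the bit pattern of
// `i8::MIN` (`-128`).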
let literal = ty::Const::from_bits(self.hir.tcx(), n, param_ty);
self.literal_operand(span, literal)
}
}