2016-06-23 01:03:58 -06:00
|
|
|
use rustc::hir::def_id::DefId;
|
2016-11-03 10:38:08 +01:00
|
|
|
use rustc::mir;
|
2016-08-27 01:44:46 -06:00
|
|
|
use rustc::traits::{self, Reveal};
|
2016-06-23 01:03:58 -06:00
|
|
|
use rustc::ty::fold::TypeFoldable;
|
|
|
|
use rustc::ty::layout::Layout;
|
2016-11-04 15:55:05 +01:00
|
|
|
use rustc::ty::subst::{Substs, Kind};
|
2016-06-23 01:03:58 -06:00
|
|
|
use rustc::ty::{self, Ty, TyCtxt, BareFnTy};
|
|
|
|
use syntax::codemap::{DUMMY_SP, Span};
|
2016-09-19 02:19:31 -06:00
|
|
|
use syntax::{ast, attr};
|
2016-06-23 01:03:58 -06:00
|
|
|
|
|
|
|
use error::{EvalError, EvalResult};
|
2016-09-09 12:51:14 +02:00
|
|
|
use memory::Pointer;
|
2016-09-19 19:40:56 -06:00
|
|
|
use primval::PrimVal;
|
2016-11-04 15:55:05 +01:00
|
|
|
use super::{EvalContext, Lvalue, IntegerExt, StackPopCleanup, LvalueExtra, monomorphize_field_ty};
|
2016-09-23 10:27:14 +02:00
|
|
|
use super::value::Value;
|
2016-06-23 01:03:58 -06:00
|
|
|
|
2016-09-20 16:05:30 +02:00
|
|
|
mod intrinsics;
|
|
|
|
|
2016-06-23 01:03:58 -06:00
|
|
|
impl<'a, 'tcx> EvalContext<'a, 'tcx> {
|
2016-07-06 17:55:05 +02:00
|
|
|
|
|
|
|
pub(super) fn goto_block(&mut self, target: mir::BasicBlock) {
|
|
|
|
self.frame_mut().block = target;
|
|
|
|
self.frame_mut().stmt = 0;
|
|
|
|
}
|
|
|
|
|
2016-06-23 01:03:58 -06:00
|
|
|
    /// Execute a single MIR terminator: decide where control flow goes next
    /// (and, for `Call`/`Drop`, push the stack frames needed to get there).
    ///
    /// Returns an error if the terminator cannot be evaluated (invalid
    /// discriminant, failed assertion, unsupported callee, ...).
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> EvalResult<'tcx, ()> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.dump_local(self.frame().return_lvalue);
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(target),

            If { ref cond, targets: (then_target, else_target) } => {
                // The condition must evaluate to a boolean primval.
                let cond_val = self.eval_operand_to_primval(cond)?.try_as_bool()?;
                self.goto_block(if cond_val { then_target } else { else_target });
            }

            SwitchInt { ref discr, ref values, ref targets, .. } => {
                let discr_val = self.eval_and_read_lvalue(discr)?;
                let discr_ty = self.lvalue_ty(discr);
                let discr_prim = self.value_to_primval(discr_val, discr_ty)?;

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                // `values[i]` corresponds to `targets[i]`; first match wins.
                for (index, const_val) in values.iter().enumerate() {
                    let val = self.const_to_value(const_val)?;
                    let prim = self.value_to_primval(val, discr_ty)?;
                    if discr_prim == prim {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(target_block);
            }

            Switch { ref discr, ref targets, adt_def } => {
                // FIXME(solson)
                let lvalue = self.eval_lvalue(discr)?;
                let lvalue = self.force_allocation(lvalue)?;

                let adt_ptr = lvalue.to_ptr();
                let adt_ty = self.lvalue_ty(discr);
                let discr_val = self.read_discriminant_value(adt_ptr, adt_ty)?;
                // Find the variant whose discriminant matches the value read
                // from memory; `targets` is parallel to `adt_def.variants`.
                let matching = adt_def.variants.iter()
                    .position(|v| discr_val == v.disr_val.to_u64_unchecked());

                match matching {
                    Some(i) => self.goto_block(targets[i]),
                    None => return Err(EvalError::InvalidDiscriminant),
                }
            }

            Call { ref func, ref args, ref destination, .. } => {
                // `destination` is `None` for diverging calls.
                let destination = match *destination {
                    Some((ref lv, target)) => Some((self.eval_lvalue(lv)?, target)),
                    None => None,
                };

                let func_ty = self.operand_ty(func);
                match func_ty.sty {
                    ty::TyFnPtr(bare_fn_ty) => {
                        // Resolve the function pointer back to the function
                        // it was created from, and check that ABI/signature
                        // still match the pointer's type.
                        let fn_ptr = self.eval_operand_to_primval(func)?.to_ptr();
                        let (def_id, substs, abi, sig) = self.memory.get_fn(fn_ptr.alloc_id)?;
                        if abi != bare_fn_ty.abi || sig != bare_fn_ty.sig.skip_binder() {
                            return Err(EvalError::FunctionPointerTyMismatch(abi, sig, bare_fn_ty));
                        }
                        self.eval_fn_call(def_id, substs, bare_fn_ty, destination, args,
                                          terminator.source_info.span)?
                    },
                    ty::TyFnDef(def_id, substs, fn_ty) => {
                        self.eval_fn_call(def_id, substs, fn_ty, destination, args,
                                          terminator.source_info.span)?
                    }

                    _ => return Err(EvalError::Unimplemented(format!("can't handle callee of type {:?}", func_ty))),
                }
            }

            Drop { ref location, target, .. } => {
                let lval = self.eval_lvalue(location)?;
                let ty = self.lvalue_ty(location);
                // we can't generate the drop stack frames on the fly,
                // because that would change our call stack
                // and very much confuse the further processing of the drop glue
                let mut drops = Vec::new();
                self.drop(lval, ty, &mut drops)?;
                // Set the continuation block first, then push the collected
                // drop-impl frames on top of it.
                self.goto_block(target);
                self.eval_drop_impls(drops)?;
            }

            Assert { ref cond, expected, ref msg, target, .. } => {
                let cond_val = self.eval_operand_to_primval(cond)?.try_as_bool()?;
                if expected == cond_val {
                    self.goto_block(target);
                } else {
                    // Assertion failed: turn the MIR assert message into the
                    // corresponding evaluation error.
                    return match *msg {
                        mir::AssertMessage::BoundsCheck { ref len, ref index } => {
                            let span = terminator.source_info.span;
                            let len = self.eval_operand_to_primval(len).expect("can't eval len")
                                .expect_uint("BoundsCheck len wasn't a uint");
                            let index = self.eval_operand_to_primval(index)
                                .expect("can't eval index")
                                .expect_uint("BoundsCheck index wasn't a uint");
                            Err(EvalError::ArrayIndexOutOfBounds(span, len, index))
                        },
                        mir::AssertMessage::Math(ref err) =>
                            Err(EvalError::Math(terminator.source_info.span, err.clone())),
                    }
                }
            },

            DropAndReplace { .. } => unimplemented!(),
            Resume => unimplemented!(),
            Unreachable => unimplemented!(),
        }

        Ok(())
    }
|
|
|
|
|
2016-11-17 14:48:34 +01:00
|
|
|
    /// Push one stack frame per collected drop impl (as gathered by
    /// `Self::drop`) and write each impl's self argument into the new frame,
    /// so the main interpreter loop will execute the drop glue.
    pub fn eval_drop_impls(&mut self, drops: Vec<(DefId, Value, &'tcx Substs<'tcx>)>) -> EvalResult<'tcx, ()> {
        let span = self.frame().span;
        // add them to the stack in reverse order, because the impl that needs to run the last
        // is the one that needs to be at the bottom of the stack
        for (drop_def_id, self_arg, substs) in drops.into_iter().rev() {
            // FIXME: supply a real span
            let mir = self.load_mir(drop_def_id)?;
            trace!("substs for drop glue: {:?}", substs);
            self.push_stack_frame(
                drop_def_id,
                span,
                mir,
                substs,
                // return place is a ZST pointer — drop glue returns nothing
                Lvalue::from_ptr(Pointer::zst_ptr()),
                StackPopCleanup::None,
            )?;
            // A drop impl takes exactly one argument: the value being dropped.
            let mut arg_locals = self.frame().mir.args_iter();
            let first = arg_locals.next().expect("drop impl has self arg");
            assert!(arg_locals.next().is_none(), "drop impl should have only one arg");
            let dest = self.eval_lvalue(&mir::Lvalue::Local(first))?;
            let ty = self.frame().mir.local_decls[first].ty;
            self.write_value(self_arg, dest, ty)?;
        }
        Ok(())
    }
|
|
|
|
|
2016-06-23 01:03:58 -06:00
|
|
|
    /// Dispatch a function call according to its ABI: intrinsics and C-ABI
    /// shims are emulated in place, Rust/RustCall functions get a real stack
    /// frame pushed with their arguments written into the frame's locals.
    fn eval_fn_call(
        &mut self,
        def_id: DefId,
        substs: &'tcx Substs<'tcx>,
        fn_ty: &'tcx BareFnTy,
        destination: Option<(Lvalue<'tcx>, mir::BasicBlock)>,
        arg_operands: &[mir::Operand<'tcx>],
        span: Span,
    ) -> EvalResult<'tcx, ()> {
        use syntax::abi::Abi;
        match fn_ty.abi {
            Abi::RustIntrinsic => {
                let ty = fn_ty.sig.0.output;
                let layout = self.type_layout(ty)?;
                // NOTE(review): panics for a diverging call (`destination` is
                // `None`) — confirm intrinsics are never diverging here.
                let (ret, target) = destination.unwrap();
                self.call_intrinsic(def_id, substs, arg_operands, ret, ty, layout, target)?;
                Ok(())
            }

            Abi::C => {
                let ty = fn_ty.sig.0.output;
                let size = self.type_size(ty)?.expect("function return type cannot be unsized");
                let (ret, target) = destination.unwrap();
                self.call_c_abi(def_id, arg_operands, ret, size)?;
                // C ABI shims push no frame, so continue immediately.
                self.goto_block(target);
                Ok(())
            }

            Abi::Rust | Abi::RustCall => {
                // Evaluate all arguments before resolving the callee, since
                // trait-method resolution may rewrite them (e.g. unpacking
                // the rust-call tuple).
                let mut args = Vec::new();
                for arg in arg_operands {
                    let arg_val = self.eval_operand(arg)?;
                    let arg_ty = self.operand_ty(arg);
                    args.push((arg_val, arg_ty));
                }

                // Only trait methods can have a Self parameter.
                let (resolved_def_id, resolved_substs) =
                    if let Some(trait_id) = self.tcx.trait_of_item(def_id) {
                        self.trait_method(trait_id, def_id, substs, &mut args)?
                    } else {
                        (def_id, substs)
                    };

                let mir = self.load_mir(resolved_def_id)?;
                let (return_lvalue, return_to_block) = match destination {
                    Some((lvalue, block)) => (lvalue, StackPopCleanup::Goto(block)),
                    None => {
                        // FIXME(solson)
                        let lvalue = Lvalue::from_ptr(Pointer::never_ptr());
                        (lvalue, StackPopCleanup::None)
                    }
                };

                self.push_stack_frame(
                    resolved_def_id,
                    span,
                    mir,
                    resolved_substs,
                    return_lvalue,
                    return_to_block,
                )?;

                // Copy the evaluated arguments into the callee's argument locals.
                let arg_locals = self.frame().mir.args_iter();
                for (arg_local, (arg_val, arg_ty)) in arg_locals.zip(args) {
                    let dest = self.eval_lvalue(&mir::Lvalue::Local(arg_local))?;
                    self.write_value(arg_val, dest, arg_ty)?;
                }

                Ok(())
            }

            abi => Err(EvalError::Unimplemented(format!("can't handle function with {:?} ABI", abi))),
        }
    }
|
|
|
|
|
|
|
|
    /// Read the discriminant of the ADT value stored at `adt_ptr`, decoding
    /// it according to the type's computed layout.
    fn read_discriminant_value(&self, adt_ptr: Pointer, adt_ty: Ty<'tcx>) -> EvalResult<'tcx, u64> {
        use rustc::ty::layout::Layout::*;
        let adt_layout = self.type_layout(adt_ty)?;
        trace!("read_discriminant_value {:?}", adt_layout);

        let discr_val = match *adt_layout {
            // Tagged layouts: the discriminant is stored directly at the
            // start of the value.
            General { discr, .. } | CEnum { discr, signed: false, .. } => {
                let discr_size = discr.size().bytes();
                self.memory.read_uint(adt_ptr, discr_size)?
            }

            CEnum { discr, signed: true, .. } => {
                let discr_size = discr.size().bytes();
                // Read signed, then reinterpret the bits as u64 (`as` cast).
                self.memory.read_int(adt_ptr, discr_size)? as u64
            }

            // Nullable-pointer optimization: a null value encodes the
            // "other" variant, anything else encodes variant `nndiscr`.
            RawNullablePointer { nndiscr, value } => {
                let discr_size = value.size(&self.tcx.data_layout).bytes();
                trace!("rawnullablepointer with size {}", discr_size);
                self.read_nonnull_discriminant_value(adt_ptr, nndiscr, discr_size)?
            }

            StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
                // The discriminating field lives at some offset inside the
                // non-null variant's struct.
                let (offset, ty) = self.nonnull_offset_and_ty(adt_ty, nndiscr, discrfield)?;
                let nonnull = adt_ptr.offset(offset.bytes());
                trace!("struct wrapped nullable pointer type: {}", ty);
                // only the pointer part of a fat pointer is used for this space optimization
                let discr_size = self.type_size(ty)?.expect("bad StructWrappedNullablePointer discrfield");
                self.read_nonnull_discriminant_value(nonnull, nndiscr, discr_size)?
            }

            // The discriminant_value intrinsic returns 0 for non-sum types.
            Array { .. } | FatPointer { .. } | Scalar { .. } | Univariant { .. } |
            Vector { .. } | UntaggedUnion { .. } => 0,
        };

        Ok(discr_val)
    }
|
|
|
|
|
2016-11-18 12:55:14 +01:00
|
|
|
fn read_nonnull_discriminant_value(&self, ptr: Pointer, nndiscr: u64, discr_size: u64) -> EvalResult<'tcx, u64> {
|
2016-11-11 13:10:47 +01:00
|
|
|
let not_null = match self.memory.read_uint(ptr, discr_size) {
|
2016-06-23 01:03:58 -06:00
|
|
|
Ok(0) => false,
|
|
|
|
Ok(_) | Err(EvalError::ReadPointerAsBytes) => true,
|
|
|
|
Err(e) => return Err(e),
|
|
|
|
};
|
|
|
|
assert!(nndiscr == 0 || nndiscr == 1);
|
|
|
|
Ok(if not_null { nndiscr } else { 1 - nndiscr })
|
|
|
|
}
|
|
|
|
|
|
|
|
fn call_c_abi(
|
|
|
|
&mut self,
|
|
|
|
def_id: DefId,
|
|
|
|
args: &[mir::Operand<'tcx>],
|
2016-10-21 10:29:56 +02:00
|
|
|
dest: Lvalue<'tcx>,
|
2016-11-18 12:55:14 +01:00
|
|
|
dest_size: u64,
|
2016-06-23 01:03:58 -06:00
|
|
|
) -> EvalResult<'tcx, ()> {
|
|
|
|
let name = self.tcx.item_name(def_id);
|
|
|
|
let attrs = self.tcx.get_attrs(def_id);
|
2016-11-26 17:36:31 -08:00
|
|
|
let link_name = attr::first_attr_value_str_by_name(&attrs, "link_name")
|
|
|
|
.unwrap_or(name)
|
|
|
|
.as_str();
|
2016-06-23 01:03:58 -06:00
|
|
|
|
2016-09-23 10:27:14 +02:00
|
|
|
let args_res: EvalResult<Vec<Value>> = args.iter()
|
|
|
|
.map(|arg| self.eval_operand(arg))
|
2016-06-23 01:03:58 -06:00
|
|
|
.collect();
|
|
|
|
let args = args_res?;
|
|
|
|
|
|
|
|
if link_name.starts_with("pthread_") {
|
|
|
|
warn!("ignoring C ABI call: {}", link_name);
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
|
2016-09-23 10:27:14 +02:00
|
|
|
let usize = self.tcx.types.usize;
|
|
|
|
|
2016-06-23 01:03:58 -06:00
|
|
|
match &link_name[..] {
|
|
|
|
"__rust_allocate" => {
|
2016-10-14 03:31:45 -06:00
|
|
|
let size = self.value_to_primval(args[0], usize)?
|
|
|
|
.expect_uint("__rust_allocate first arg not usize");
|
|
|
|
let align = self.value_to_primval(args[1], usize)?
|
|
|
|
.expect_uint("__rust_allocate second arg not usize");
|
2016-11-18 12:55:14 +01:00
|
|
|
let ptr = self.memory.allocate(size, align)?;
|
2016-10-20 04:42:19 -06:00
|
|
|
self.write_primval(dest, PrimVal::from_ptr(ptr))?;
|
2016-06-23 01:03:58 -06:00
|
|
|
}
|
|
|
|
|
2016-11-03 17:32:06 +01:00
|
|
|
"__rust_deallocate" => {
|
|
|
|
let ptr = args[0].read_ptr(&self.memory)?;
|
|
|
|
// FIXME: insert sanity check for size and align?
|
|
|
|
let _old_size = self.value_to_primval(args[1], usize)?
|
|
|
|
.expect_uint("__rust_deallocate second arg not usize");
|
|
|
|
let _align = self.value_to_primval(args[2], usize)?
|
|
|
|
.expect_uint("__rust_deallocate third arg not usize");
|
|
|
|
self.memory.deallocate(ptr)?;
|
|
|
|
},
|
|
|
|
|
2016-06-23 01:03:58 -06:00
|
|
|
"__rust_reallocate" => {
|
2016-09-23 10:27:14 +02:00
|
|
|
let ptr = args[0].read_ptr(&self.memory)?;
|
|
|
|
let size = self.value_to_primval(args[2], usize)?.expect_uint("__rust_reallocate third arg not usize");
|
|
|
|
let align = self.value_to_primval(args[3], usize)?.expect_uint("__rust_reallocate fourth arg not usize");
|
2016-11-18 12:55:14 +01:00
|
|
|
let new_ptr = self.memory.reallocate(ptr, size, align)?;
|
2016-10-20 04:42:19 -06:00
|
|
|
self.write_primval(dest, PrimVal::from_ptr(new_ptr))?;
|
2016-06-23 01:03:58 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
"memcmp" => {
|
2016-09-23 10:27:14 +02:00
|
|
|
let left = args[0].read_ptr(&self.memory)?;
|
|
|
|
let right = args[1].read_ptr(&self.memory)?;
|
2016-11-18 12:55:14 +01:00
|
|
|
let n = self.value_to_primval(args[2], usize)?.expect_uint("__rust_reallocate first arg not usize");
|
2016-06-23 01:03:58 -06:00
|
|
|
|
|
|
|
let result = {
|
|
|
|
let left_bytes = self.memory.read_bytes(left, n)?;
|
|
|
|
let right_bytes = self.memory.read_bytes(right, n)?;
|
|
|
|
|
|
|
|
use std::cmp::Ordering::*;
|
|
|
|
match left_bytes.cmp(right_bytes) {
|
|
|
|
Less => -1,
|
|
|
|
Equal => 0,
|
|
|
|
Greater => 1,
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2016-10-20 04:42:19 -06:00
|
|
|
self.write_primval(dest, PrimVal::from_int_with_size(result, dest_size))?;
|
2016-06-23 01:03:58 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
_ => {
|
|
|
|
return Err(EvalError::Unimplemented(format!("can't call C ABI function: {}", link_name)));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Since we pushed no stack frame, the main loop will act
|
|
|
|
// as if the call just completed and it's returning to the
|
|
|
|
// current frame.
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2016-09-10 15:14:49 +02:00
|
|
|
    /// Resolve `trait_ref` to a concrete vtable via trait selection,
    /// fully resolving all nested obligations.
    ///
    /// Panics if selection fails or is ambiguous (the double `unwrap`) —
    /// callers are expected to pass only well-formed, monomorphic refs.
    pub(super) fn fulfill_obligation(&self, trait_ref: ty::PolyTraitRef<'tcx>) -> traits::Vtable<'tcx, ()> {
        // Do the initial selection for the obligation. This yields the shallow result we are
        // looking for -- that is, what specific impl.
        self.tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
            let mut selcx = traits::SelectionContext::new(&infcx);

            let obligation = traits::Obligation::new(
                traits::ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID),
                trait_ref.to_poly_trait_predicate(),
            );
            let selection = selcx.select(&obligation).unwrap().unwrap();

            // Currently, we use a fulfillment context to completely resolve all nested obligations.
            // This is because they can inform the inference of the impl's type parameters.
            let mut fulfill_cx = traits::FulfillmentContext::new();
            let vtable = selection.map(|predicate| {
                fulfill_cx.register_predicate_obligation(&infcx, predicate);
            });
            infcx.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, &vtable)
        })
    }
|
|
|
|
|
2016-11-17 17:23:40 +01:00
|
|
|
    /// For the "rust-call" ABI: pop the trailing tuple argument and push its
    /// fields back as individual by-ref arguments, using the tuple layout's
    /// field offsets. No-op if `args` is empty.
    fn unpack_fn_args(&self, args: &mut Vec<(Value, Ty<'tcx>)>) -> EvalResult<'tcx, ()> {
        if let Some((last, last_ty)) = args.pop() {
            let last_layout = self.type_layout(last_ty)?;
            match (&last_ty.sty, last_layout) {
                (&ty::TyTuple(fields),
                    &Layout::Univariant { ref variant, .. }) => {
                    let offsets = variant.offsets.iter().map(|s| s.bytes());
                    // The tuple must live in memory so each field can be
                    // referenced at `base + offset`.
                    let last_ptr = match last {
                        Value::ByRef(ptr) => ptr,
                        _ => bug!("rust-call ABI tuple argument wasn't Value::ByRef"),
                    };
                    for (offset, ty) in offsets.zip(fields) {
                        let arg = Value::ByRef(last_ptr.offset(offset));
                        args.push((arg, ty));
                    }
                }
                ty => bug!("expected tuple as last argument in function with 'rust-call' ABI, got {:?}", ty),
            }
        }
        Ok(())
    }
|
|
|
|
|
2016-06-23 01:03:58 -06:00
|
|
|
    /// Trait method, which has to be resolved to an impl method.
    ///
    /// Returns the `DefId` and substs of the concrete function to call, and
    /// may rewrite `args` along the way (unpacking rust-call tuples,
    /// adjusting the self argument for closures and trait objects).
    fn trait_method(
        &mut self,
        trait_id: DefId,
        def_id: DefId,
        substs: &'tcx Substs<'tcx>,
        args: &mut Vec<(Value, Ty<'tcx>)>,
    ) -> EvalResult<'tcx, (DefId, &'tcx Substs<'tcx>)> {
        let trait_ref = ty::TraitRef::from_method(self.tcx, trait_id, substs);
        let trait_ref = self.tcx.normalize_associated_type(&ty::Binder(trait_ref));

        match self.fulfill_obligation(trait_ref) {
            // Ordinary `impl Trait for Type` method.
            traits::VtableImpl(vtable_impl) => {
                let impl_did = vtable_impl.impl_def_id;
                let mname = self.tcx.item_name(def_id);
                // Create a concatenated set of substitutions which includes those from the impl
                // and those from the method:
                let (did, substs) = find_method(self.tcx, substs, impl_did, vtable_impl.substs, mname);

                Ok((did, substs))
            }

            // Closure called through one of the `Fn*` traits.
            traits::VtableClosure(vtable_closure) => {
                let trait_closure_kind = self.tcx
                    .lang_items
                    .fn_trait_kind(trait_id)
                    .expect("The substitutions should have no type parameters remaining after passing through fulfill_obligation");
                let closure_kind = self.tcx.closure_kind(vtable_closure.closure_def_id);
                trace!("closures {:?}, {:?}", closure_kind, trait_closure_kind);
                self.unpack_fn_args(args)?;
                match (closure_kind, trait_closure_kind) {
                    (ty::ClosureKind::Fn, ty::ClosureKind::Fn) |
                    (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) |
                    (ty::ClosureKind::FnOnce, ty::ClosureKind::FnOnce) |
                    (ty::ClosureKind::Fn, ty::ClosureKind::FnMut) => {} // No adapter needed.

                    (ty::ClosureKind::Fn, ty::ClosureKind::FnOnce) |
                    (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => {
                        // The closure fn is a `fn(&self, ...)` or `fn(&mut self, ...)`.
                        // We want a `fn(self, ...)`.
                        // We can produce this by doing something like:
                        //
                        // fn call_once(self, ...) { call_mut(&self, ...) }
                        // fn call_once(mut self, ...) { call_mut(&mut self, ...) }
                        //
                        // These are both the same at trans time.

                        // Interpreter magic: insert an intermediate pointer, so we can skip the
                        // intermediate function call.
                        // FIXME: this is a memory leak, should probably add the pointer to the
                        // current stack.
                        let first = self.value_to_ptr_dont_use(args[0].0, args[0].1)?;
                        args[0].0 = Value::ByVal(PrimVal::from_ptr(first));
                        args[0].1 = self.tcx.mk_mut_ptr(args[0].1);
                    }

                    _ => bug!("cannot convert {:?} to {:?}", closure_kind, trait_closure_kind),
                }
                Ok((vtable_closure.closure_def_id, vtable_closure.substs.substs))
            }

            // `impl Fn* for fn(...)`: strip the fn-pointer self arg and call
            // the underlying function directly.
            traits::VtableFnPointer(vtable_fn_ptr) => {
                if let ty::TyFnDef(did, ref substs, _) = vtable_fn_ptr.fn_ty.sty {
                    args.remove(0);
                    self.unpack_fn_args(args)?;
                    Ok((did, substs))
                } else {
                    bug!("VtableFnPointer did not contain a concrete function: {:?}", vtable_fn_ptr)
                }
            }

            // Dynamic dispatch through a trait object's vtable.
            traits::VtableObject(ref data) => {
                let idx = self.tcx.get_vtable_index_of_object_method(data, def_id) as u64;
                if let Some(&mut(ref mut first_arg, ref mut first_ty)) = args.get_mut(0) {
                    // Split the fat self pointer into data pointer + vtable.
                    let (self_ptr, vtable) = first_arg.expect_ptr_vtable_pair(&self.memory)?;
                    *first_arg = Value::ByVal(PrimVal::from_ptr(self_ptr));
                    // Skip the 3 vtable header slots before the method
                    // entries — presumably drop glue, size, align; TODO confirm
                    // against the vtable builder.
                    let idx = idx + 3;
                    let offset = idx * self.memory.pointer_size();
                    let fn_ptr = self.memory.read_ptr(vtable.offset(offset))?;
                    let (def_id, substs, _abi, sig) = self.memory.get_fn(fn_ptr.alloc_id)?;
                    // Replace the erased self type with the concrete one from
                    // the resolved function's signature.
                    *first_ty = sig.inputs[0];
                    Ok((def_id, substs))
                } else {
                    Err(EvalError::VtableForArgumentlessMethod)
                }
            },
            vtable => bug!("resolved vtable bad vtable {:?} in trans", vtable),
        }
    }
|
|
|
|
|
|
|
|
pub(super) fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
|
|
|
|
self.tcx.type_needs_drop_given_env(ty, &self.tcx.empty_parameter_environment())
|
|
|
|
}
|
|
|
|
|
2016-11-03 17:32:37 +01:00
|
|
|
/// push DefIds of drop impls and their argument on the given vector
|
2016-11-17 14:48:34 +01:00
|
|
|
pub fn drop(
|
2016-11-03 17:32:37 +01:00
|
|
|
&mut self,
|
|
|
|
lval: Lvalue<'tcx>,
|
|
|
|
ty: Ty<'tcx>,
|
2016-11-04 15:55:05 +01:00
|
|
|
drop: &mut Vec<(DefId, Value, &'tcx Substs<'tcx>)>,
|
2016-11-03 17:32:37 +01:00
|
|
|
) -> EvalResult<'tcx, ()> {
|
2016-06-23 01:03:58 -06:00
|
|
|
if !self.type_needs_drop(ty) {
|
|
|
|
debug!("no need to drop {:?}", ty);
|
|
|
|
return Ok(());
|
|
|
|
}
|
2016-11-03 15:22:39 +01:00
|
|
|
trace!("-need to drop {:?} at {:?}", ty, lval);
|
2016-06-23 01:03:58 -06:00
|
|
|
|
|
|
|
match ty.sty {
|
2016-11-04 09:15:31 +01:00
|
|
|
// special case `Box` to deallocate the inner allocation
|
|
|
|
ty::TyBox(contents_ty) => {
|
|
|
|
let val = self.read_lvalue(lval)?;
|
2016-11-04 15:55:05 +01:00
|
|
|
// we are going through the read_value path, because that already does all the
|
|
|
|
// checks for the trait object types. We'd only be repeating ourselves here.
|
|
|
|
let val = self.follow_by_ref_value(val, ty)?;
|
|
|
|
trace!("box dealloc on {:?}", val);
|
|
|
|
match val {
|
|
|
|
Value::ByRef(_) => bug!("follow_by_ref_value can't result in ByRef"),
|
|
|
|
Value::ByVal(ptr) => {
|
|
|
|
assert!(self.type_is_sized(contents_ty));
|
2016-11-15 14:12:49 +01:00
|
|
|
let contents_ptr = ptr.to_ptr();
|
2016-11-04 15:55:05 +01:00
|
|
|
self.drop(Lvalue::from_ptr(contents_ptr), contents_ty, drop)?;
|
|
|
|
},
|
|
|
|
Value::ByValPair(prim_ptr, extra) => {
|
2016-11-15 14:12:49 +01:00
|
|
|
let ptr = prim_ptr.to_ptr();
|
|
|
|
let extra = match self.tcx.struct_tail(contents_ty).sty {
|
|
|
|
ty::TyTrait(_) => LvalueExtra::Vtable(extra.to_ptr()),
|
|
|
|
ty::TyStr | ty::TySlice(_) => LvalueExtra::Length(extra.try_as_uint()?),
|
|
|
|
_ => bug!("invalid fat pointer type: {}", ty),
|
2016-11-04 15:55:05 +01:00
|
|
|
};
|
|
|
|
self.drop(
|
|
|
|
Lvalue::Ptr {
|
|
|
|
ptr: ptr,
|
|
|
|
extra: extra,
|
|
|
|
},
|
|
|
|
contents_ty,
|
|
|
|
drop,
|
|
|
|
)?;
|
|
|
|
},
|
|
|
|
}
|
|
|
|
let box_free_fn = self.tcx.lang_items.box_free_fn().expect("no box_free lang item");
|
|
|
|
let substs = self.tcx.intern_substs(&[Kind::from(contents_ty)]);
|
|
|
|
// this is somewhat hacky, but hey, there's no representation difference between
|
|
|
|
// pointers and references, so
|
|
|
|
// #[lang = "box_free"] unsafe fn box_free<T>(ptr: *mut T)
|
|
|
|
// is the same as
|
|
|
|
// fn drop(&mut self) if Self is Box<T>
|
|
|
|
drop.push((box_free_fn, val, substs));
|
2016-11-04 09:15:31 +01:00
|
|
|
},
|
|
|
|
|
2016-11-03 15:22:39 +01:00
|
|
|
ty::TyAdt(adt_def, substs) => {
|
|
|
|
// FIXME: some structs are represented as ByValPair
|
2016-11-04 15:55:05 +01:00
|
|
|
let lval = self.force_allocation(lval)?;
|
|
|
|
let adt_ptr = match lval {
|
|
|
|
Lvalue::Ptr { ptr, .. } => ptr,
|
|
|
|
_ => bug!("force allocation can only yield Lvalue::Ptr"),
|
|
|
|
};
|
2016-11-03 17:32:37 +01:00
|
|
|
// run drop impl before the fields' drop impls
|
|
|
|
if let Some(drop_def_id) = adt_def.destructor() {
|
2016-11-04 15:55:05 +01:00
|
|
|
drop.push((drop_def_id, Value::ByVal(PrimVal::from_ptr(adt_ptr)), substs));
|
2016-11-03 17:32:37 +01:00
|
|
|
}
|
2016-11-17 17:23:40 +01:00
|
|
|
let layout = self.type_layout(ty)?;
|
2016-11-03 15:55:09 +01:00
|
|
|
let fields = match *layout {
|
|
|
|
Layout::Univariant { ref variant, .. } => {
|
|
|
|
adt_def.struct_variant().fields.iter().zip(&variant.offsets)
|
|
|
|
},
|
|
|
|
Layout::General { ref variants, .. } => {
|
|
|
|
let discr_val = self.read_discriminant_value(adt_ptr, ty)?;
|
|
|
|
match adt_def.variants.iter().position(|v| discr_val == v.disr_val.to_u64_unchecked()) {
|
|
|
|
// start at offset 1, to skip over the discriminant
|
|
|
|
Some(i) => adt_def.variants[i].fields.iter().zip(&variants[i].offsets[1..]),
|
|
|
|
None => return Err(EvalError::InvalidDiscriminant),
|
|
|
|
}
|
|
|
|
},
|
|
|
|
Layout::StructWrappedNullablePointer { nndiscr, ref nonnull, .. } => {
|
|
|
|
let discr = self.read_discriminant_value(adt_ptr, ty)?;
|
|
|
|
if discr == nndiscr {
|
2016-11-18 12:55:14 +01:00
|
|
|
assert_eq!(discr as usize as u64, discr);
|
2016-11-03 15:55:09 +01:00
|
|
|
adt_def.variants[discr as usize].fields.iter().zip(&nonnull.offsets)
|
|
|
|
} else {
|
|
|
|
// FIXME: the zst variant might contain zst types that impl Drop
|
|
|
|
return Ok(()); // nothing to do, this is zero sized (e.g. `None`)
|
|
|
|
}
|
|
|
|
},
|
2016-11-04 09:34:54 +01:00
|
|
|
Layout::RawNullablePointer { nndiscr, .. } => {
|
|
|
|
let discr = self.read_discriminant_value(adt_ptr, ty)?;
|
|
|
|
if discr == nndiscr {
|
2016-11-18 12:55:14 +01:00
|
|
|
assert_eq!(discr as usize as u64, discr);
|
2016-11-04 09:34:54 +01:00
|
|
|
assert_eq!(adt_def.variants[discr as usize].fields.len(), 1);
|
|
|
|
let field_ty = &adt_def.variants[discr as usize].fields[0];
|
2016-11-04 15:55:05 +01:00
|
|
|
let field_ty = monomorphize_field_ty(self.tcx, field_ty, substs);
|
2016-11-04 09:34:54 +01:00
|
|
|
// FIXME: once read_discriminant_value works with lvalue, don't force
|
|
|
|
// alloc in the RawNullablePointer case
|
2016-11-04 15:55:05 +01:00
|
|
|
self.drop(lval, field_ty, drop)?;
|
2016-11-04 09:34:54 +01:00
|
|
|
return Ok(());
|
|
|
|
} else {
|
2016-11-03 15:55:09 +01:00
|
|
|
// FIXME: the zst variant might contain zst types that impl Drop
|
|
|
|
return Ok(()); // nothing to do, this is zero sized (e.g. `None`)
|
|
|
|
}
|
|
|
|
},
|
|
|
|
_ => bug!("{:?} is not an adt layout", layout),
|
|
|
|
};
|
2016-11-04 15:55:05 +01:00
|
|
|
let tcx = self.tcx;
|
|
|
|
self.drop_fields(
|
|
|
|
fields.map(|(ty, &offset)| (monomorphize_field_ty(tcx, ty, substs), offset)),
|
|
|
|
lval,
|
|
|
|
drop,
|
|
|
|
)?;
|
2016-11-03 15:22:39 +01:00
|
|
|
},
|
|
|
|
ty::TyTuple(fields) => {
|
2016-11-17 17:23:40 +01:00
|
|
|
let offsets = match *self.type_layout(ty)? {
|
2016-11-04 15:55:05 +01:00
|
|
|
Layout::Univariant { ref variant, .. } => &variant.offsets,
|
|
|
|
_ => bug!("tuples must be univariant"),
|
|
|
|
};
|
|
|
|
self.drop_fields(fields.iter().cloned().zip(offsets.iter().cloned()), lval, drop)?;
|
2016-11-03 15:22:39 +01:00
|
|
|
},
|
2016-11-04 16:38:04 +01:00
|
|
|
ty::TyTrait(_) => {
|
|
|
|
let (ptr, vtable) = match lval {
|
|
|
|
Lvalue::Ptr { ptr, extra: LvalueExtra::Vtable(vtable) } => (ptr, vtable),
|
|
|
|
_ => bug!("expected an lvalue with a vtable"),
|
|
|
|
};
|
|
|
|
let drop_fn = self.memory.read_ptr(vtable)?;
|
|
|
|
// some values don't need to call a drop impl, so the value is null
|
2016-11-10 19:20:11 +01:00
|
|
|
if drop_fn != Pointer::from_int(0) {
|
2016-11-15 17:19:37 +01:00
|
|
|
let (def_id, substs, _abi, sig) = self.memory.get_fn(drop_fn.alloc_id)?;
|
|
|
|
let real_ty = sig.inputs[0];
|
2016-11-04 16:38:04 +01:00
|
|
|
self.drop(Lvalue::from_ptr(ptr), real_ty, drop)?;
|
|
|
|
drop.push((def_id, Value::ByVal(PrimVal::from_ptr(ptr)), substs));
|
|
|
|
} else {
|
|
|
|
// just a sanity check
|
|
|
|
assert_eq!(drop_fn.offset, 0);
|
|
|
|
}
|
2016-11-04 16:51:43 +01:00
|
|
|
},
|
|
|
|
ty::TySlice(elem_ty) => {
|
|
|
|
let (ptr, len) = match lval {
|
2016-11-18 12:55:14 +01:00
|
|
|
Lvalue::Ptr { ptr, extra: LvalueExtra::Length(len) } => (ptr, len),
|
2016-11-04 16:51:43 +01:00
|
|
|
_ => bug!("expected an lvalue with a length"),
|
|
|
|
};
|
2016-11-18 12:55:14 +01:00
|
|
|
let size = self.type_size(elem_ty)?.expect("slice element must be sized");
|
2016-11-04 16:51:43 +01:00
|
|
|
// FIXME: this creates a lot of stack frames if the element type has
|
|
|
|
// a drop impl
|
|
|
|
for i in 0..len {
|
|
|
|
self.drop(Lvalue::from_ptr(ptr.offset(i * size)), elem_ty, drop)?;
|
2016-11-03 15:22:39 +01:00
|
|
|
}
|
|
|
|
},
|
2016-11-04 17:34:33 +01:00
|
|
|
ty::TyArray(elem_ty, len) => {
|
|
|
|
let lval = self.force_allocation(lval)?;
|
|
|
|
let (ptr, extra) = match lval {
|
|
|
|
Lvalue::Ptr { ptr, extra } => (ptr, extra),
|
2016-11-04 17:51:53 +01:00
|
|
|
_ => bug!("expected an lvalue with optional extra data"),
|
2016-11-04 17:34:33 +01:00
|
|
|
};
|
2016-11-18 12:55:14 +01:00
|
|
|
let size = self.type_size(elem_ty)?.expect("array element cannot be unsized");
|
2016-11-04 17:34:33 +01:00
|
|
|
// FIXME: this creates a lot of stack frames if the element type has
|
|
|
|
// a drop impl
|
2016-11-18 12:55:14 +01:00
|
|
|
for i in 0..(len as u64) {
|
|
|
|
self.drop(Lvalue::Ptr { ptr: ptr.offset(i * size), extra: extra }, elem_ty, drop)?;
|
2016-11-04 17:34:33 +01:00
|
|
|
}
|
|
|
|
},
|
|
|
|
// FIXME: what about TyClosure and TyAnon?
|
2016-11-03 15:22:39 +01:00
|
|
|
// other types do not need to process drop
|
|
|
|
_ => {},
|
2016-06-23 01:03:58 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
2016-11-04 15:55:05 +01:00
|
|
|
|
|
|
|
fn drop_fields<
|
|
|
|
I: Iterator<Item=(Ty<'tcx>, ty::layout::Size)>,
|
|
|
|
>(
|
|
|
|
&mut self,
|
|
|
|
mut fields: I,
|
|
|
|
lval: Lvalue<'tcx>,
|
|
|
|
drop: &mut Vec<(DefId, Value, &'tcx Substs<'tcx>)>,
|
|
|
|
) -> EvalResult<'tcx, ()> {
|
2016-11-05 13:02:29 +01:00
|
|
|
// FIXME: some aggregates may be represented by Value::ByValPair
|
2016-11-05 17:09:37 +01:00
|
|
|
let (adt_ptr, extra) = self.force_allocation(lval)?.to_ptr_and_extra();
|
2016-11-04 15:55:05 +01:00
|
|
|
// manual iteration, because we need to be careful about the last field if it is unsized
|
|
|
|
while let Some((field_ty, offset)) = fields.next() {
|
2016-11-18 12:55:14 +01:00
|
|
|
let ptr = adt_ptr.offset(offset.bytes());
|
2016-11-04 15:55:05 +01:00
|
|
|
if self.type_is_sized(field_ty) {
|
|
|
|
self.drop(Lvalue::from_ptr(ptr), field_ty, drop)?;
|
|
|
|
} else {
|
|
|
|
let lvalue = Lvalue::Ptr {
|
|
|
|
ptr: ptr,
|
|
|
|
extra: extra,
|
|
|
|
};
|
|
|
|
self.drop(lvalue, field_ty, drop)?;
|
|
|
|
break; // if it is not sized, then this is the last field anyway
|
|
|
|
}
|
|
|
|
}
|
|
|
|
assert!(fields.next().is_none());
|
|
|
|
Ok(())
|
|
|
|
}
|
2016-06-23 01:03:58 -06:00
|
|
|
}
|
|
|
|
|
|
|
|
/// The result of resolving a trait method to its concrete definition in
/// (or inherited by) a specific impl; produced by `get_impl_method`.
#[derive(Debug)]
pub(super) struct ImplMethod<'tcx> {
    /// The associated item (method definition) that was located.
    pub(super) method: ty::AssociatedItem,
    /// Substitutions for the method, rebased/translated onto the defining
    /// node and lifted out of the inference context.
    pub(super) substs: &'tcx Substs<'tcx>,
    /// `true` if the resolved definition comes from the trait itself (a
    /// provided default body) rather than the impl — set from
    /// `node.is_from_trait()` by `get_impl_method`.
    pub(super) is_provided: bool,
}
|
|
|
|
|
|
|
|
/// Locates the applicable definition of a method, given its name.
|
2016-09-10 15:14:49 +02:00
|
|
|
pub(super) fn get_impl_method<'a, 'tcx>(
|
2016-06-23 01:03:58 -06:00
|
|
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
substs: &'tcx Substs<'tcx>,
|
2016-08-27 01:44:46 -06:00
|
|
|
impl_def_id: DefId,
|
|
|
|
impl_substs: &'tcx Substs<'tcx>,
|
2016-06-23 01:03:58 -06:00
|
|
|
name: ast::Name,
|
|
|
|
) -> ImplMethod<'tcx> {
|
2016-09-07 10:12:15 +02:00
|
|
|
assert!(!substs.needs_infer());
|
2016-06-23 01:03:58 -06:00
|
|
|
|
|
|
|
let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap();
|
|
|
|
let trait_def = tcx.lookup_trait_def(trait_def_id);
|
|
|
|
|
2016-11-17 11:31:28 +01:00
|
|
|
match trait_def.ancestors(impl_def_id).defs(tcx, name, ty::AssociatedKind::Method).next() {
|
2016-06-23 01:03:58 -06:00
|
|
|
Some(node_item) => {
|
2016-09-06 16:04:51 +02:00
|
|
|
let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
|
2016-08-27 01:44:46 -06:00
|
|
|
let substs = substs.rebase_onto(tcx, trait_def_id, impl_substs);
|
2016-06-23 01:03:58 -06:00
|
|
|
let substs = traits::translate_substs(&infcx, impl_def_id,
|
|
|
|
substs, node_item.node);
|
|
|
|
tcx.lift(&substs).unwrap_or_else(|| {
|
|
|
|
bug!("trans::meth::get_impl_method: translate_substs \
|
|
|
|
returned {:?} which contains inference types/regions",
|
|
|
|
substs);
|
|
|
|
})
|
|
|
|
});
|
|
|
|
ImplMethod {
|
|
|
|
method: node_item.item,
|
|
|
|
substs: substs,
|
|
|
|
is_provided: node_item.node.is_from_trait(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => {
|
|
|
|
bug!("method {:?} not found in {:?}", name, impl_def_id)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-09-20 12:52:01 +02:00
|
|
|
|
|
|
|
/// Locates the applicable definition of a method, given its name.
|
|
|
|
pub fn find_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
substs: &'tcx Substs<'tcx>,
|
|
|
|
impl_def_id: DefId,
|
|
|
|
impl_substs: &'tcx Substs<'tcx>,
|
|
|
|
name: ast::Name)
|
|
|
|
-> (DefId, &'tcx Substs<'tcx>)
|
|
|
|
{
|
|
|
|
assert!(!substs.needs_infer());
|
|
|
|
|
|
|
|
let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap();
|
|
|
|
let trait_def = tcx.lookup_trait_def(trait_def_id);
|
|
|
|
|
2016-11-17 11:31:28 +01:00
|
|
|
match trait_def.ancestors(impl_def_id).defs(tcx, name, ty::AssociatedKind::Method).next() {
|
2016-09-20 12:52:01 +02:00
|
|
|
Some(node_item) => {
|
|
|
|
let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
|
|
|
|
let substs = substs.rebase_onto(tcx, trait_def_id, impl_substs);
|
|
|
|
let substs = traits::translate_substs(&infcx, impl_def_id, substs, node_item.node);
|
|
|
|
tcx.lift(&substs).unwrap_or_else(|| {
|
|
|
|
bug!("find_method: translate_substs \
|
|
|
|
returned {:?} which contains inference types/regions",
|
|
|
|
substs);
|
|
|
|
})
|
|
|
|
});
|
|
|
|
(node_item.item.def_id, substs)
|
|
|
|
}
|
|
|
|
None => {
|
|
|
|
bug!("method {:?} not found in {:?}", name, impl_def_id)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|