auto merge of #15998 : luqmana/rust/nmnnbd, r=thestinger

LLVM recently added a new attribute, `dereferenceable`: http://reviews.llvm.org/D4449

>This patch adds a dereferencable attribute. In some sense, this is a companion to the nonnull attribute, but specifies that the pointer is known to be dereferencable in the same sense as a pointer generated by alloca is known to be dereferencable.

With Rust, everywhere that we previously marked `nonnull` we can instead mark `dereferenceable` (which implies nonnull), since we know the pointee size. The one exception is when generating calls for TyVisitor: the self type has not been substituted (we still have a `ty_param`) and is treated as an opaque pointer, so that case is left as `nonnull`.

With this, LLVM can for example hoist a load out of a loop where it previously couldn't:

```rust
pub fn baz(c: &uint, n: uint) -> uint {
    let mut res = 0;
    for i in range(0, n) {
        if i > 0 {
            res += *c * i;
        }
    }
    res
}
```

Before:
```llvm
define i64 @baz(i64* noalias nocapture nonnull readonly, i64) unnamed_addr #0 {
entry-block:
  br label %for_loopback.outer

for_loopback.outer:                               ; preds = %then-block-33-, %entry-block
  %.ph = phi i64 [ %.lcssa, %then-block-33- ], [ 0, %entry-block ]
  %res.0.ph = phi i64 [ %8, %then-block-33- ], [ 0, %entry-block ]
  br label %for_loopback

for_exit:                                         ; preds = %for_loopback
  %res.0.ph.lcssa = phi i64 [ %res.0.ph, %for_loopback ]
  ret i64 %res.0.ph.lcssa

for_loopback:                                     ; preds = %for_loopback.outer, %for_body
  %2 = phi i64 [ %4, %for_body ], [ %.ph, %for_loopback.outer ]
  %3 = icmp ult i64 %2, %1
  br i1 %3, label %for_body, label %for_exit

for_body:                                         ; preds = %for_loopback
  %4 = add i64 %2, 1
  %5 = icmp eq i64 %2, 0
  br i1 %5, label %for_loopback, label %then-block-33-

then-block-33-:                                   ; preds = %for_body
  %.lcssa = phi i64 [ %4, %for_body ]
  %.lcssa15 = phi i64 [ %2, %for_body ]
  %6 = load i64* %0, align 8                     ; <------- this load
  %7 = mul i64 %6, %.lcssa15
  %8 = add i64 %7, %res.0.ph
  br label %for_loopback.outer
}
```

After (the load of `*c` is hoisted into the entry block, since `dereferenceable(8)` guarantees it is safe to execute speculatively):
```llvm
define i64 @baz(i64* noalias nocapture readonly dereferenceable(8), i64) unnamed_addr #0 {
entry-block:
  %2 = load i64* %0, align 8                    ; <------- load once instead
  br label %for_loopback.outer

for_loopback.outer:                               ; preds = %then-block-33-, %entry-block
  %.ph = phi i64 [ %.lcssa, %then-block-33- ], [ 0, %entry-block ]
  %res.0.ph = phi i64 [ %8, %then-block-33- ], [ 0, %entry-block ]
  br label %for_loopback

for_exit:                                         ; preds = %for_loopback
  %res.0.ph.lcssa = phi i64 [ %res.0.ph, %for_loopback ]
  ret i64 %res.0.ph.lcssa

for_loopback:                                     ; preds = %for_loopback.outer, %for_body
  %3 = phi i64 [ %5, %for_body ], [ %.ph, %for_loopback.outer ]
  %4 = icmp ult i64 %3, %1
  br i1 %4, label %for_body, label %for_exit

for_body:                                         ; preds = %for_loopback
  %5 = add i64 %3, 1
  %6 = icmp eq i64 %3, 0
  br i1 %6, label %for_loopback, label %then-block-33-

then-block-33-:                                   ; preds = %for_body
  %.lcssa = phi i64 [ %5, %for_body ]
  %.lcssa15 = phi i64 [ %3, %for_body ]
  %7 = mul i64 %2, %.lcssa15
  %8 = add i64 %7, %res.0.ph
  br label %for_loopback.outer
}
```
Committed by bors on 2014-07-26 15:46:18 +00:00 as commit 7aa407958b; 10 changed files with 237 additions and 81 deletions.

@ -281,11 +281,7 @@ pub fn decl_rust_fn(ccx: &CrateContext, fn_ty: ty::t, name: &str) -> ValueRef {
let llfn = decl_fn(ccx, name, llvm::CCallConv, llfty, output);
let attrs = get_fn_llvm_attributes(ccx, fn_ty);
for &(idx, attr) in attrs.iter() {
unsafe {
llvm::LLVMAddFunctionAttribute(llfn, idx as c_uint, attr);
}
}
attrs.apply_llfn(llfn);
llfn
}
@ -962,7 +958,7 @@ pub fn invoke<'a>(
llargs.as_slice(),
normal_bcx.llbb,
landing_pad,
attributes.as_slice());
Some(attributes));
return (llresult, normal_bcx);
} else {
debug!("calling {} at {}", llfn, bcx.llbb);
@ -975,7 +971,7 @@ pub fn invoke<'a>(
None => debuginfo::clear_source_location(bcx.fcx)
};
let llresult = Call(bcx, llfn, llargs.as_slice(), attributes.as_slice());
let llresult = Call(bcx, llfn, llargs.as_slice(), Some(attributes));
return (llresult, bcx);
}
}
@ -1081,7 +1077,7 @@ pub fn call_lifetime_start(cx: &Block, ptr: ValueRef) {
let llsize = C_u64(ccx, machine::llsize_of_alloc(ccx, val_ty(ptr).element_type()));
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
let lifetime_start = ccx.get_intrinsic(&"llvm.lifetime.start");
Call(cx, lifetime_start, [llsize, ptr], []);
Call(cx, lifetime_start, [llsize, ptr], None);
}
pub fn call_lifetime_end(cx: &Block, ptr: ValueRef) {
@ -1095,7 +1091,7 @@ pub fn call_lifetime_end(cx: &Block, ptr: ValueRef) {
let llsize = C_u64(ccx, machine::llsize_of_alloc(ccx, val_ty(ptr).element_type()));
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
let lifetime_end = ccx.get_intrinsic(&"llvm.lifetime.end");
Call(cx, lifetime_end, [llsize, ptr], []);
Call(cx, lifetime_end, [llsize, ptr], None);
}
pub fn call_memcpy(cx: &Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) {
@ -1111,7 +1107,7 @@ pub fn call_memcpy(cx: &Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef,
let size = IntCast(cx, n_bytes, ccx.int_type);
let align = C_i32(ccx, align as i32);
let volatile = C_bool(ccx, false);
Call(cx, memcpy, [dst_ptr, src_ptr, size, align, volatile], []);
Call(cx, memcpy, [dst_ptr, src_ptr, size, align, volatile], None);
}
pub fn memcpy_ty(bcx: &Block, dst: ValueRef, src: ValueRef, t: ty::t) {
@ -1156,7 +1152,7 @@ fn memzero(b: &Builder, llptr: ValueRef, ty: Type) {
let size = machine::llsize_of(ccx, ty);
let align = C_i32(ccx, llalign_of_min(ccx, ty) as i32);
let volatile = C_bool(ccx, false);
b.call(llintrinsicfn, [llptr, llzeroval, size, align, volatile], []);
b.call(llintrinsicfn, [llptr, llzeroval, size, align, volatile], None);
}
pub fn alloc_ty(bcx: &Block, t: ty::t, name: &str) -> ValueRef {
@ -2040,7 +2036,7 @@ fn register_fn(ccx: &CrateContext,
}
pub fn get_fn_llvm_attributes(ccx: &CrateContext, fn_ty: ty::t)
-> Vec<(uint, u64)> {
-> llvm::AttrBuilder {
use middle::ty::{BrAnon, ReLateBound};
let (fn_sig, abi, has_env) = match ty::get(fn_ty).sty {
@ -2056,31 +2052,33 @@ pub fn get_fn_llvm_attributes(ccx: &CrateContext, fn_ty: ty::t)
_ => fail!("expected closure or function.")
};
// Since index 0 is the return value of the llvm func, we start
// at either 1 or 2 depending on whether there's an env slot or not
let mut first_arg_offset = if has_env { 2 } else { 1 };
let mut attrs = llvm::AttrBuilder::new();
let ret_ty = fn_sig.output;
// These have an odd calling convention, so we skip them for now.
//
// FIXME(pcwalton): We don't have to skip them; just untuple the result.
if abi == RustCall {
return Vec::new()
return attrs;
}
// Since index 0 is the return value of the llvm func, we start
// at either 1 or 2 depending on whether there's an env slot or not
let mut first_arg_offset = if has_env { 2 } else { 1 };
let mut attrs = Vec::new();
let ret_ty = fn_sig.output;
// A function pointer is called without the declaration
// available, so we have to apply any attributes with ABI
// implications directly to the call instruction. Right now,
// the only attribute we need to worry about is `sret`.
if type_of::return_uses_outptr(ccx, ret_ty) {
attrs.push((1, llvm::StructRetAttribute as u64));
let llret_sz = llsize_of_real(ccx, type_of::type_of(ccx, ret_ty));
// The outptr can be noalias and nocapture because it's entirely
// invisible to the program. We can also mark it as nonnull
attrs.push((1, llvm::NoAliasAttribute as u64));
attrs.push((1, llvm::NoCaptureAttribute as u64));
attrs.push((1, llvm::NonNullAttribute as u64));
// invisible to the program. We also know it's nonnull as well
// as how many bytes we can dereference
attrs.arg(1, llvm::StructRetAttribute)
.arg(1, llvm::NoAliasAttribute)
.arg(1, llvm::NoCaptureAttribute)
.arg(1, llvm::DereferenceableAttribute(llret_sz));
// Add one more since there's an outptr
first_arg_offset += 1;
@ -2094,27 +2092,28 @@ pub fn get_fn_llvm_attributes(ccx: &CrateContext, fn_ty: ty::t)
ty::ty_str | ty::ty_vec(..) | ty::ty_trait(..) => true, _ => false
} => {}
ty::ty_uniq(_) => {
attrs.push((llvm::ReturnIndex as uint, llvm::NoAliasAttribute as u64));
attrs.ret(llvm::NoAliasAttribute);
}
_ => {}
}
// We can also mark the return value as `nonnull` in certain cases
// We can also mark the return value as `dereferenceable` in certain cases
match ty::get(ret_ty).sty {
// These are not really pointers but pairs, (pointer, len)
ty::ty_uniq(it) |
ty::ty_rptr(_, ty::mt { ty: it, .. }) if match ty::get(it).sty {
ty::ty_str | ty::ty_vec(..) | ty::ty_trait(..) => true, _ => false
} => {}
ty::ty_uniq(_) | ty::ty_rptr(_, _) => {
attrs.push((llvm::ReturnIndex as uint, llvm::NonNullAttribute as u64));
ty::ty_uniq(inner) | ty::ty_rptr(_, ty::mt { ty: inner, .. }) => {
let llret_sz = llsize_of_real(ccx, type_of::type_of(ccx, inner));
attrs.ret(llvm::DereferenceableAttribute(llret_sz));
}
_ => {}
}
match ty::get(ret_ty).sty {
ty::ty_bool => {
attrs.push((llvm::ReturnIndex as uint, llvm::ZExtAttribute as u64));
attrs.ret(llvm::ZExtAttribute);
}
_ => {}
}
@ -2124,44 +2123,77 @@ pub fn get_fn_llvm_attributes(ccx: &CrateContext, fn_ty: ty::t)
match ty::get(t).sty {
// this needs to be first to prevent fat pointers from falling through
_ if !type_is_immediate(ccx, t) => {
let llarg_sz = llsize_of_real(ccx, type_of::type_of(ccx, t));
// For non-immediate arguments the callee gets its own copy of
// the value on the stack, so there are no aliases. It's also
// program-invisible so can't possibly capture
attrs.push((idx, llvm::NoAliasAttribute as u64));
attrs.push((idx, llvm::NoCaptureAttribute as u64));
attrs.push((idx, llvm::NonNullAttribute as u64));
attrs.arg(idx, llvm::NoAliasAttribute)
.arg(idx, llvm::NoCaptureAttribute)
.arg(idx, llvm::DereferenceableAttribute(llarg_sz));
}
ty::ty_bool => {
attrs.push((idx, llvm::ZExtAttribute as u64));
attrs.arg(idx, llvm::ZExtAttribute);
}
// `~` pointer parameters never alias because ownership is transferred
ty::ty_uniq(_) => {
attrs.push((idx, llvm::NoAliasAttribute as u64));
attrs.push((idx, llvm::NonNullAttribute as u64));
ty::ty_uniq(inner) => {
let llsz = llsize_of_real(ccx, type_of::type_of(ccx, inner));
attrs.arg(idx, llvm::NoAliasAttribute)
.arg(idx, llvm::DereferenceableAttribute(llsz));
}
// The visit glue deals only with opaque pointers so we don't
// actually know the concrete type of Self thus we don't know how
// many bytes to mark as dereferenceable so instead we just mark
// it as nonnull which still holds true
ty::ty_rptr(b, ty::mt { ty: it, mutbl }) if match ty::get(it).sty {
ty::ty_param(_) => true, _ => false
} && mutbl == ast::MutMutable => {
attrs.arg(idx, llvm::NoAliasAttribute)
.arg(idx, llvm::NonNullAttribute);
match b {
ReLateBound(_, BrAnon(_)) => {
attrs.arg(idx, llvm::NoCaptureAttribute);
}
_ => {}
}
}
// `&mut` pointer parameters never alias other parameters, or mutable global data
// `&` pointer parameters never alias either (for LLVM's purposes) as long as the
// interior is safe
ty::ty_rptr(b, mt) if mt.mutbl == ast::MutMutable ||
!ty::type_contents(ccx.tcx(), mt.ty).interior_unsafe() => {
attrs.push((idx, llvm::NoAliasAttribute as u64));
attrs.push((idx, llvm::NonNullAttribute as u64));
let llsz = llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));
attrs.arg(idx, llvm::NoAliasAttribute)
.arg(idx, llvm::DereferenceableAttribute(llsz));
match b {
ReLateBound(_, BrAnon(_)) => {
attrs.push((idx, llvm::NoCaptureAttribute as u64));
attrs.arg(idx, llvm::NoCaptureAttribute);
}
_ => {}
}
}
// When a reference in an argument has no named lifetime, it's impossible for that
// reference to escape this function (returned or stored beyond the call by a closure).
ty::ty_rptr(ReLateBound(_, BrAnon(_)), _) => {
attrs.push((idx, llvm::NoCaptureAttribute as u64));
attrs.push((idx, llvm::NonNullAttribute as u64));
ty::ty_rptr(ReLateBound(_, BrAnon(_)), mt) => {
let llsz = llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));
attrs.arg(idx, llvm::NoCaptureAttribute)
.arg(idx, llvm::DereferenceableAttribute(llsz));
}
// & pointer parameters are never null
ty::ty_rptr(_, _) => {
attrs.push((idx, llvm::NonNullAttribute as u64));
// & pointer parameters are also never null and we know exactly how
// many bytes we can dereference
ty::ty_rptr(_, mt) => {
let llsz = llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));
attrs.arg(idx, llvm::DereferenceableAttribute(llsz));
}
_ => ()
}

@ -12,7 +12,7 @@
#![allow(non_snake_case_functions)]
use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use middle::trans::common::*;
@ -113,7 +113,7 @@ pub fn Invoke(cx: &Block,
args: &[ValueRef],
then: BasicBlockRef,
catch: BasicBlockRef,
attributes: &[(uint, u64)])
attributes: Option<AttrBuilder>)
-> ValueRef {
if cx.unreachable.get() {
return C_null(Type::i8(cx.ccx()));
@ -681,13 +681,13 @@ pub fn InlineAsmCall(cx: &Block, asm: *const c_char, cons: *const c_char,
}
pub fn Call(cx: &Block, fn_: ValueRef, args: &[ValueRef],
attributes: &[(uint, u64)]) -> ValueRef {
attributes: Option<AttrBuilder>) -> ValueRef {
if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
B(cx).call(fn_, args, attributes)
}
pub fn CallWithConv(cx: &Block, fn_: ValueRef, args: &[ValueRef], conv: CallConv,
attributes: &[(uint, u64)]) -> ValueRef {
attributes: Option<AttrBuilder>) -> ValueRef {
if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
B(cx).call_with_conv(fn_, args, conv, attributes)
}

@ -11,7 +11,7 @@
#![allow(dead_code)] // FFI wrappers
use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect, AttrBuilder};
use llvm::{Opcode, IntPredicate, RealPredicate, False};
use llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
use middle::trans::base;
@ -155,7 +155,7 @@ impl<'a> Builder<'a> {
args: &[ValueRef],
then: BasicBlockRef,
catch: BasicBlockRef,
attributes: &[(uint, u64)])
attributes: Option<AttrBuilder>)
-> ValueRef {
self.count_insn("invoke");
@ -174,8 +174,9 @@ impl<'a> Builder<'a> {
then,
catch,
noname());
for &(idx, attr) in attributes.iter() {
llvm::LLVMAddCallSiteAttribute(v, idx as c_uint, attr);
match attributes {
Some(a) => a.apply_callsite(v),
None => {}
}
v
}
@ -777,7 +778,7 @@ impl<'a> Builder<'a> {
c, noname(), False, False)
}
});
self.call(asm, [], []);
self.call(asm, [], None);
}
}
@ -802,12 +803,12 @@ impl<'a> Builder<'a> {
unsafe {
let v = llvm::LLVMInlineAsm(
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
self.call(v, inputs, [])
self.call(v, inputs, None)
}
}
pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
attributes: &[(uint, u64)]) -> ValueRef {
attributes: Option<AttrBuilder>) -> ValueRef {
self.count_insn("call");
debug!("Call {} with args ({})",
@ -820,15 +821,16 @@ impl<'a> Builder<'a> {
unsafe {
let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),
args.len() as c_uint, noname());
for &(idx, attr) in attributes.iter() {
llvm::LLVMAddCallSiteAttribute(v, idx as c_uint, attr);
match attributes {
Some(a) => a.apply_callsite(v),
None => {}
}
v
}
}
pub fn call_with_conv(&self, llfn: ValueRef, args: &[ValueRef],
conv: CallConv, attributes: &[(uint, u64)]) -> ValueRef {
conv: CallConv, attributes: Option<AttrBuilder>) -> ValueRef {
self.count_insn("callwithconv");
let v = self.call(llfn, args, attributes);
llvm::SetInstructionCallConv(v, conv);

@ -588,7 +588,7 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext,
}
llargs.extend(args.iter().map(|arg| arg.val));
let retval = Call(bcx, fn_ptr, llargs.as_slice(), []);
let retval = Call(bcx, fn_ptr, llargs.as_slice(), None);
if type_is_zero_size(ccx, f.sig.output) || fcx.llretptr.get().is_some() {
RetVoid(bcx);
} else {

@ -553,7 +553,7 @@ fn trans_index<'a>(bcx: &'a Block<'a>,
let expected = Call(bcx,
expect,
[bounds_check, C_bool(ccx, false)],
[]);
None);
bcx = with_cond(bcx, expected, |bcx| {
controlflow::trans_fail_bounds_check(bcx,
index_expr.span,

@ -382,21 +382,24 @@ pub fn trans_native_call<'a>(
// A function pointer is called without the declaration available, so we have to apply
// any attributes with ABI implications directly to the call instruction.
let mut attrs = Vec::new();
let mut attrs = llvm::AttrBuilder::new();
// Add attributes that are always applicable, independent of the concrete foreign ABI
if fn_type.ret_ty.is_indirect() {
let llret_sz = machine::llsize_of_real(ccx, fn_type.ret_ty.ty);
// The outptr can be noalias and nocapture because it's entirely
// invisible to the program. We can also mark it as nonnull
attrs.push((1, llvm::NoAliasAttribute as u64));
attrs.push((1, llvm::NoCaptureAttribute as u64));
attrs.push((1, llvm::NonNullAttribute as u64));
// invisible to the program. We also know it's nonnull as well
// as how many bytes we can dereference
attrs.arg(1, llvm::NoAliasAttribute)
.arg(1, llvm::NoCaptureAttribute)
.arg(1, llvm::DereferenceableAttribute(llret_sz));
};
// Add attributes that depend on the concrete foreign ABI
let mut arg_idx = if fn_type.ret_ty.is_indirect() { 1 } else { 0 };
match fn_type.ret_ty.attr {
Some(attr) => attrs.push((arg_idx, attr as u64)),
Some(attr) => { attrs.arg(arg_idx, attr); },
_ => ()
}
@ -409,7 +412,7 @@ pub fn trans_native_call<'a>(
if arg_ty.pad.is_some() { arg_idx += 1; }
match arg_ty.attr {
Some(attr) => attrs.push((arg_idx, attr as u64)),
Some(attr) => { attrs.arg(arg_idx, attr); },
_ => {}
}
@ -420,7 +423,7 @@ pub fn trans_native_call<'a>(
llfn,
llargs_foreign.as_slice(),
cc,
attrs.as_slice());
Some(attrs));
// If the function we just called does not use an outpointer,
// store the result into the rust outpointer. Cast the outpointer
@ -762,7 +765,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext,
// Perform the call itself
debug!("calling llrustfn = {}, t = {}", ccx.tn.val_to_string(llrustfn), t.repr(ccx.tcx()));
let attributes = base::get_fn_llvm_attributes(ccx, t);
let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), attributes.as_slice());
let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), Some(attributes));
// Get the return value where the foreign fn expects it.
let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast {

@ -129,7 +129,7 @@ pub fn drop_ty<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
} else {
v
};
Call(bcx, glue, [ptr], []);
Call(bcx, glue, [ptr], None);
}
bcx
}
@ -185,7 +185,7 @@ pub fn call_visit_glue(bcx: &Block, v: ValueRef, tydesc: ValueRef) {
let llfn = Load(bcx, GEPi(bcx, tydesc, [0u, abi::tydesc_field_visit_glue]));
let llrawptr = PointerCast(bcx, v, Type::i8p(bcx.ccx()));
Call(bcx, llfn, [llrawptr], []);
Call(bcx, llfn, [llrawptr], None);
}
fn make_visit_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
@ -303,7 +303,7 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'
Call(bcx,
dtor,
[PointerCast(bcx, lluniquevalue, Type::i8p(bcx.ccx()))],
[]);
None);
bcx
})
}
@ -342,7 +342,7 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'
let dtor_ptr = GEPi(bcx, env, [0u, abi::box_field_tydesc]);
let dtor = Load(bcx, dtor_ptr);
let cdata = GEPi(bcx, env, [0u, abi::box_field_body]);
Call(bcx, dtor, [PointerCast(bcx, cdata, Type::i8p(bcx.ccx()))], []);
Call(bcx, dtor, [PointerCast(bcx, cdata, Type::i8p(bcx.ccx()))], None);
// Free the environment itself
// FIXME: #13994: pass align and size here

@ -208,17 +208,17 @@ pub fn trans_intrinsic_call<'a>(mut bcx: &'a Block<'a>, node: ast::NodeId,
let llval = match (simple, name.get()) {
(Some(llfn), _) => {
Call(bcx, llfn, llargs.as_slice(), [])
Call(bcx, llfn, llargs.as_slice(), None)
}
(_, "abort") => {
let llfn = ccx.get_intrinsic(&("llvm.trap"));
let v = Call(bcx, llfn, [], []);
let v = Call(bcx, llfn, [], None);
Unreachable(bcx);
v
}
(_, "breakpoint") => {
let llfn = ccx.get_intrinsic(&("llvm.debugtrap"));
Call(bcx, llfn, [], [])
Call(bcx, llfn, [], None)
}
(_, "size_of") => {
let tp_ty = *substs.types.get(FnSpace, 0);
@ -553,7 +553,7 @@ fn copy_intrinsic(bcx: &Block, allow_overlap: bool, volatile: bool,
let llfn = ccx.get_intrinsic(&name);
Call(bcx, llfn, [dst_ptr, src_ptr, Mul(bcx, size, count), align,
C_bool(ccx, volatile)], [])
C_bool(ccx, volatile)], None)
}
fn memset_intrinsic(bcx: &Block, volatile: bool, tp_ty: ty::t,
@ -572,13 +572,13 @@ fn memset_intrinsic(bcx: &Block, volatile: bool, tp_ty: ty::t,
let llfn = ccx.get_intrinsic(&name);
Call(bcx, llfn, [dst_ptr, val, Mul(bcx, size, count), align,
C_bool(ccx, volatile)], [])
C_bool(ccx, volatile)], None)
}
fn count_zeros_intrinsic(bcx: &Block, name: &'static str, val: ValueRef) -> ValueRef {
let y = C_bool(bcx.ccx(), false);
let llfn = bcx.ccx().get_intrinsic(&name);
Call(bcx, llfn, [val, y], [])
Call(bcx, llfn, [val, y], None)
}
fn with_overflow_intrinsic(bcx: &Block, name: &'static str, t: ty::t,
@ -586,7 +586,7 @@ fn with_overflow_intrinsic(bcx: &Block, name: &'static str, t: ty::t,
let llfn = bcx.ccx().get_intrinsic(&name);
// Convert `i1` to a `bool`, and write it to the out parameter
let val = Call(bcx, llfn, [a, b], []);
let val = Call(bcx, llfn, [a, b], None);
let result = ExtractValue(bcx, val, 0);
let overflow = ZExt(bcx, ExtractValue(bcx, val, 1), Type::bool(bcx.ccx()));
let ret = C_undef(type_of::type_of(bcx.ccx(), t));

@ -130,12 +130,101 @@ pub enum OtherAttribute {
NonNullAttribute = 1 << 44,
}
pub enum SpecialAttribute {
DereferenceableAttribute(u64)
}
#[repr(C)]
pub enum AttributeSet {
ReturnIndex = 0,
FunctionIndex = !0
}
trait AttrHelper {
fn apply_llfn(&self, idx: c_uint, llfn: ValueRef);
fn apply_callsite(&self, idx: c_uint, callsite: ValueRef);
}
impl AttrHelper for Attribute {
fn apply_llfn(&self, idx: c_uint, llfn: ValueRef) {
unsafe {
LLVMAddFunctionAttribute(llfn, idx, *self as uint64_t);
}
}
fn apply_callsite(&self, idx: c_uint, callsite: ValueRef) {
unsafe {
LLVMAddCallSiteAttribute(callsite, idx, *self as uint64_t);
}
}
}
impl AttrHelper for OtherAttribute {
fn apply_llfn(&self, idx: c_uint, llfn: ValueRef) {
unsafe {
LLVMAddFunctionAttribute(llfn, idx, *self as uint64_t);
}
}
fn apply_callsite(&self, idx: c_uint, callsite: ValueRef) {
unsafe {
LLVMAddCallSiteAttribute(callsite, idx, *self as uint64_t);
}
}
}
impl AttrHelper for SpecialAttribute {
fn apply_llfn(&self, idx: c_uint, llfn: ValueRef) {
match *self {
DereferenceableAttribute(bytes) => unsafe {
LLVMAddDereferenceableAttr(llfn, idx, bytes as uint64_t);
}
}
}
fn apply_callsite(&self, idx: c_uint, callsite: ValueRef) {
match *self {
DereferenceableAttribute(bytes) => unsafe {
LLVMAddDereferenceableCallSiteAttr(callsite, idx, bytes as uint64_t);
}
}
}
}
pub struct AttrBuilder {
attrs: Vec<(uint, Box<AttrHelper>)>
}
impl AttrBuilder {
pub fn new() -> AttrBuilder {
AttrBuilder {
attrs: Vec::new()
}
}
pub fn arg<'a, T: AttrHelper + 'static>(&'a mut self, idx: uint, a: T) -> &'a mut AttrBuilder {
self.attrs.push((idx, box a as Box<AttrHelper>));
self
}
pub fn ret<'a, T: AttrHelper + 'static>(&'a mut self, a: T) -> &'a mut AttrBuilder {
self.attrs.push((ReturnIndex as uint, box a as Box<AttrHelper>));
self
}
pub fn apply_llfn(&self, llfn: ValueRef) {
for &(idx, ref attr) in self.attrs.iter() {
attr.apply_llfn(idx as c_uint, llfn);
}
}
pub fn apply_callsite(&self, callsite: ValueRef) {
for &(idx, ref attr) in self.attrs.iter() {
attr.apply_callsite(idx as c_uint, callsite);
}
}
}
// enum for the LLVM IntPredicate type
pub enum IntPredicate {
IntEQ = 32,
@ -740,6 +829,7 @@ extern {
pub fn LLVMSetFunctionCallConv(Fn: ValueRef, CC: c_uint);
pub fn LLVMGetGC(Fn: ValueRef) -> *const c_char;
pub fn LLVMSetGC(Fn: ValueRef, Name: *const c_char);
pub fn LLVMAddDereferenceableAttr(Fn: ValueRef, index: c_uint, bytes: uint64_t);
pub fn LLVMAddFunctionAttribute(Fn: ValueRef, index: c_uint, PA: uint64_t);
pub fn LLVMAddFunctionAttrString(Fn: ValueRef, index: c_uint, Name: *const c_char);
pub fn LLVMRemoveFunctionAttrString(Fn: ValueRef, index: c_uint, Name: *const c_char);
@ -811,6 +901,9 @@ extern {
pub fn LLVMAddCallSiteAttribute(Instr: ValueRef,
index: c_uint,
Val: uint64_t);
pub fn LLVMAddDereferenceableCallSiteAttr(Instr: ValueRef,
index: c_uint,
bytes: uint64_t);
/* Operations on call instructions (only) */
pub fn LLVMIsTailCall(CallInst: ValueRef) -> Bool;
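
As a quick orientation for the `AttrBuilder` API introduced above (this sketch is not part of the patch): attributes are accumulated per argument index, or for the return value, and then applied either to a function declaration or to an individual call site. The sketch assumes the `llvm` bindings from this patch are in scope and that `llfn` and `call` are pre-existing `ValueRef`s.

```rust
// Illustrative only: mirrors how trans uses the new builder.
let mut attrs = llvm::AttrBuilder::new();
attrs.arg(1, llvm::NoAliasAttribute)            // first argument never aliases
     .arg(1, llvm::NoCaptureAttribute)          // ...and is never captured
     .arg(1, llvm::DereferenceableAttribute(8)) // ...and 8 bytes are known dereferenceable
     .ret(llvm::ZExtAttribute);                 // zero-extend the (bool) return value

// The same builder can decorate a declaration or an individual call/invoke:
attrs.apply_llfn(llfn);
attrs.apply_callsite(call);
```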

@ -113,6 +113,21 @@ extern "C" void LLVMAddCallSiteAttribute(LLVMValueRef Instr, unsigned index, uin
index, B)));
}
#if LLVM_VERSION_MINOR >= 5
extern "C" void LLVMAddDereferenceableCallSiteAttr(LLVMValueRef Instr, unsigned idx, uint64_t b) {
CallSite Call = CallSite(unwrap<Instruction>(Instr));
AttrBuilder B;
B.addDereferenceableAttr(b);
Call.setAttributes(
Call.getAttributes().addAttributes(Call->getContext(), idx,
AttributeSet::get(Call->getContext(),
idx, B)));
}
#else
extern "C" void LLVMAddDereferenceableCallSiteAttr(LLVMValueRef, unsigned, uint64_t) {}
#endif
extern "C" void LLVMAddFunctionAttribute(LLVMValueRef Fn, unsigned index, uint64_t Val) {
Function *A = unwrap<Function>(Fn);
AttrBuilder B;
@ -120,6 +135,17 @@ extern "C" void LLVMAddFunctionAttribute(LLVMValueRef Fn, unsigned index, uint64
A->addAttributes(index, AttributeSet::get(A->getContext(), index, B));
}
#if LLVM_VERSION_MINOR >= 5
extern "C" void LLVMAddDereferenceableAttr(LLVMValueRef Fn, unsigned index, uint64_t bytes) {
Function *A = unwrap<Function>(Fn);
AttrBuilder B;
B.addDereferenceableAttr(bytes);
A->addAttributes(index, AttributeSet::get(A->getContext(), index, B));
}
#else
extern "C" void LLVMAddDereferenceableAttr(LLVMValueRef, unsigned, uint64_t) {}
#endif
extern "C" void LLVMAddFunctionAttrString(LLVMValueRef Fn, unsigned index, const char *Name) {
Function *F = unwrap<Function>(Fn);
AttrBuilder B;