2015-02-28 23:53:12 +02:00
|
|
|
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
2012-12-03 16:48:01 -08:00
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
2016-02-15 15:41:16 -05:00
|
|
|
|
2015-02-28 23:53:12 +02:00
|
|
|
//! Translate the completed AST to the LLVM IR.
|
|
|
|
//!
|
|
|
|
//! Some functions here, such as trans_block and trans_expr, return a value --
|
2016-02-23 21:39:35 +02:00
|
|
|
//! the result of the translation to LLVM -- while others, such as trans_fn
|
|
|
|
//! and trans_item, are called only for the side effect of adding a
|
2015-02-28 23:53:12 +02:00
|
|
|
//! particular definition to the LLVM IR output we're producing.
|
|
|
|
//!
|
|
|
|
//! Hopefully useful general knowledge about trans:
|
|
|
|
//!
|
|
|
|
//! * There's no way to find out the Ty type of a ValueRef. Doing so
|
|
|
|
//! would be "trying to get the eggs out of an omelette" (credit:
|
|
|
|
//! pcwalton). You can, instead, find out its TypeRef by calling val_ty,
|
|
|
|
//! but one TypeRef corresponds to many `Ty`s; for instance, tup(int, int,
|
|
|
|
//! int) and rec(x=int, y=int, z=int) will have the same TypeRef.
|
2011-12-13 16:25:51 -08:00
|
|
|
|
2016-07-21 12:49:59 -04:00
|
|
|
use super::ModuleLlvm;
|
|
|
|
use super::ModuleSource;
|
2014-11-27 07:21:26 -05:00
|
|
|
use super::ModuleTranslation;
|
2017-07-25 17:26:24 +02:00
|
|
|
use super::ModuleKind;
|
2014-11-27 07:21:26 -05:00
|
|
|
|
2017-09-13 20:26:39 -07:00
|
|
|
use assert_module_sources::{self, Disposition};
|
2016-05-12 19:52:38 +03:00
|
|
|
use back::link;
|
2017-09-12 09:32:37 -07:00
|
|
|
use back::symbol_export;
|
2017-07-26 11:41:34 +02:00
|
|
|
use back::write::{self, OngoingCrateTranslation};
|
2017-09-12 11:04:46 -07:00
|
|
|
use llvm::{ContextRef, ModuleRef, ValueRef, Vector, get_param};
|
2014-07-07 17:58:01 -07:00
|
|
|
use llvm;
|
2017-04-26 23:22:45 +02:00
|
|
|
use metadata;
|
2017-09-13 15:24:13 -07:00
|
|
|
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
|
2017-05-27 20:48:09 +02:00
|
|
|
use rustc::middle::lang_items::StartFnLangItem;
|
2017-09-13 20:26:39 -07:00
|
|
|
use rustc::middle::trans::{Linkage, Visibility, Stats};
|
2017-07-26 15:02:53 +02:00
|
|
|
use rustc::middle::cstore::{EncodedMetadata, EncodedMetadataHashes};
|
2016-12-31 16:00:24 -07:00
|
|
|
use rustc::ty::{self, Ty, TyCtxt};
|
2017-09-12 11:04:46 -07:00
|
|
|
use rustc::ty::maps::Providers;
|
2017-09-18 18:03:09 +02:00
|
|
|
use rustc::dep_graph::{DepNode, DepKind};
|
2017-08-31 12:08:29 -07:00
|
|
|
use rustc::middle::cstore::{self, LinkMeta, LinkagePreference};
|
2017-07-31 14:51:47 +02:00
|
|
|
use rustc::util::common::{time, print_time_passes_entry};
|
2017-09-13 20:26:39 -07:00
|
|
|
use rustc::session::config::{self, NoDebugInfo};
|
2017-05-22 14:20:12 -04:00
|
|
|
use rustc::session::Session;
|
2017-09-18 18:03:09 +02:00
|
|
|
use rustc_incremental;
|
2017-02-07 22:46:21 +01:00
|
|
|
use abi;
|
2017-06-03 14:54:08 -07:00
|
|
|
use allocator;
|
2017-01-01 15:50:15 -07:00
|
|
|
use mir::lvalue::LvalueRef;
|
2016-03-22 19:23:36 +02:00
|
|
|
use attributes;
|
2016-12-16 18:48:25 -07:00
|
|
|
use builder::Builder;
|
2017-03-08 18:33:21 +02:00
|
|
|
use callee;
|
2017-08-05 12:27:28 +03:00
|
|
|
use common::{C_bool, C_bytes_in_context, C_i32, C_usize};
|
2016-06-16 18:56:14 -04:00
|
|
|
use collector::{self, TransItemCollectionMode};
|
2017-02-20 14:42:47 -05:00
|
|
|
use common::{C_struct_in_context, C_u64, C_undef, C_array};
|
2017-01-01 00:42:09 -07:00
|
|
|
use common::CrateContext;
|
2016-08-16 17:41:38 +03:00
|
|
|
use common::{type_is_zero_size, val_ty};
|
2016-03-22 19:23:36 +02:00
|
|
|
use common;
|
|
|
|
use consts;
|
2017-09-13 20:26:39 -07:00
|
|
|
use context::{self, LocalCrateContext, SharedCrateContext};
|
2016-12-16 13:25:18 -07:00
|
|
|
use debuginfo;
|
2016-03-22 19:23:36 +02:00
|
|
|
use declare;
|
|
|
|
use machine;
|
|
|
|
use meth;
|
|
|
|
use mir;
|
|
|
|
use monomorphize::{self, Instance};
|
2017-09-12 11:04:46 -07:00
|
|
|
use partitioning::{self, PartitioningStrategy, CodegenUnit, CodegenUnitExt};
|
2016-03-22 19:23:36 +02:00
|
|
|
use symbol_names_test;
|
2017-07-27 13:02:31 +02:00
|
|
|
use time_graph;
|
2017-09-12 11:04:46 -07:00
|
|
|
use trans_item::{TransItem, TransItemExt, DefPathBasedNames};
|
2016-03-22 19:23:36 +02:00
|
|
|
use type_::Type;
|
|
|
|
use type_of;
|
|
|
|
use value::Value;
|
2017-09-13 15:24:13 -07:00
|
|
|
use rustc::util::nodemap::{NodeSet, FxHashMap, FxHashSet, DefIdSet};
|
2017-08-28 15:55:32 -07:00
|
|
|
use CrateInfo;
|
2013-06-16 22:52:44 +12:00
|
|
|
|
2015-02-28 23:55:50 +02:00
|
|
|
use libc::c_uint;
|
2017-09-13 16:03:24 -07:00
|
|
|
use std::any::Any;
|
2017-09-13 20:26:39 -07:00
|
|
|
use std::cell::RefCell;
|
2015-02-17 22:47:40 -08:00
|
|
|
use std::ffi::{CStr, CString};
|
2014-11-25 13:28:35 -08:00
|
|
|
use std::str;
|
2017-07-12 17:37:58 +02:00
|
|
|
use std::sync::Arc;
|
2017-07-31 14:51:47 +02:00
|
|
|
use std::time::{Instant, Duration};
|
2016-08-16 17:41:38 +03:00
|
|
|
use std::i32;
|
2017-09-13 16:03:24 -07:00
|
|
|
use std::sync::mpsc;
|
2017-03-08 01:41:26 +02:00
|
|
|
use syntax_pos::Span;
|
2017-09-13 20:26:39 -07:00
|
|
|
use syntax_pos::symbol::InternedString;
|
2015-09-14 21:58:20 +12:00
|
|
|
use syntax::attr;
|
2016-03-29 08:50:44 +03:00
|
|
|
use rustc::hir;
|
2015-07-31 00:04:06 -07:00
|
|
|
use syntax::ast;
|
2012-03-03 17:49:23 -08:00
|
|
|
|
2017-02-06 17:27:09 +01:00
|
|
|
use mir::lvalue::Alignment;
|
|
|
|
|
2014-04-22 15:56:37 +03:00
|
|
|
pub struct StatRecorder<'a, 'tcx: 'a> {
|
|
|
|
ccx: &'a CrateContext<'a, 'tcx>,
|
2014-05-22 16:57:53 -07:00
|
|
|
name: Option<String>,
|
2015-03-25 17:06:52 -07:00
|
|
|
istart: usize,
|
2013-06-28 11:15:34 -07:00
|
|
|
}
|
|
|
|
|
2014-04-22 15:56:37 +03:00
|
|
|
impl<'a, 'tcx> StatRecorder<'a, 'tcx> {
|
2015-11-19 12:36:31 +01:00
|
|
|
pub fn new(ccx: &'a CrateContext<'a, 'tcx>, name: String) -> StatRecorder<'a, 'tcx> {
|
2017-09-13 20:26:39 -07:00
|
|
|
let istart = ccx.stats().borrow().n_llvm_insns;
|
2013-06-28 11:15:34 -07:00
|
|
|
StatRecorder {
|
2017-08-06 22:54:09 -07:00
|
|
|
ccx,
|
2014-02-14 07:07:09 +02:00
|
|
|
name: Some(name),
|
2017-08-06 22:54:09 -07:00
|
|
|
istart,
|
2013-06-28 11:15:34 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-04-22 15:56:37 +03:00
|
|
|
impl<'a, 'tcx> Drop for StatRecorder<'a, 'tcx> {
|
2013-09-16 21:18:07 -04:00
|
|
|
fn drop(&mut self) {
|
2014-03-05 16:36:01 +02:00
|
|
|
if self.ccx.sess().trans_stats() {
|
2017-09-13 20:26:39 -07:00
|
|
|
let mut stats = self.ccx.stats().borrow_mut();
|
|
|
|
let iend = stats.n_llvm_insns;
|
|
|
|
stats.fn_stats.push((self.name.take().unwrap(), iend - self.istart));
|
|
|
|
stats.n_fns += 1;
|
2013-06-28 11:15:34 -07:00
|
|
|
// Reset LLVM insn count to avoid compound costs.
|
2017-09-13 20:26:39 -07:00
|
|
|
stats.n_llvm_insns = self.istart;
|
2013-06-28 11:15:34 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-11 15:03:52 -07:00
|
|
|
pub fn get_meta(bcx: &Builder, fat_ptr: ValueRef) -> ValueRef {
|
2016-12-11 08:59:20 -07:00
|
|
|
bcx.struct_gep(fat_ptr, abi::FAT_PTR_EXTRA)
|
2016-08-16 17:41:38 +03:00
|
|
|
}
|
|
|
|
|
2016-12-11 15:03:52 -07:00
|
|
|
pub fn get_dataptr(bcx: &Builder, fat_ptr: ValueRef) -> ValueRef {
|
2016-12-11 08:59:20 -07:00
|
|
|
bcx.struct_gep(fat_ptr, abi::FAT_PTR_ADDR)
|
2014-07-29 22:08:39 -07:00
|
|
|
}
|
|
|
|
|
2016-03-29 01:46:02 +02:00
|
|
|
pub fn bin_op_to_icmp_predicate(op: hir::BinOp_,
|
2015-11-19 12:36:31 +01:00
|
|
|
signed: bool)
|
2015-01-29 14:03:34 +02:00
|
|
|
-> llvm::IntPredicate {
|
|
|
|
match op {
|
2015-07-31 00:04:06 -07:00
|
|
|
hir::BiEq => llvm::IntEQ,
|
|
|
|
hir::BiNe => llvm::IntNE,
|
|
|
|
hir::BiLt => if signed { llvm::IntSLT } else { llvm::IntULT },
|
|
|
|
hir::BiLe => if signed { llvm::IntSLE } else { llvm::IntULE },
|
|
|
|
hir::BiGt => if signed { llvm::IntSGT } else { llvm::IntUGT },
|
|
|
|
hir::BiGe => if signed { llvm::IntSGE } else { llvm::IntUGE },
|
2015-01-29 14:03:34 +02:00
|
|
|
op => {
|
2016-03-29 01:46:02 +02:00
|
|
|
bug!("comparison_op_to_icmp_predicate: expected comparison operator, \
|
|
|
|
found {:?}",
|
|
|
|
op)
|
2015-01-29 14:03:34 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2011-06-15 11:19:50 -07:00
|
|
|
|
2016-03-29 01:46:02 +02:00
|
|
|
pub fn bin_op_to_fcmp_predicate(op: hir::BinOp_) -> llvm::RealPredicate {
|
2015-01-29 14:03:34 +02:00
|
|
|
match op {
|
2015-07-31 00:04:06 -07:00
|
|
|
hir::BiEq => llvm::RealOEQ,
|
|
|
|
hir::BiNe => llvm::RealUNE,
|
|
|
|
hir::BiLt => llvm::RealOLT,
|
|
|
|
hir::BiLe => llvm::RealOLE,
|
|
|
|
hir::BiGt => llvm::RealOGT,
|
|
|
|
hir::BiGe => llvm::RealOGE,
|
2015-01-29 14:03:34 +02:00
|
|
|
op => {
|
2016-03-29 01:46:02 +02:00
|
|
|
bug!("comparison_op_to_fcmp_predicate: expected comparison operator, \
|
|
|
|
found {:?}",
|
|
|
|
op);
|
2015-01-29 14:03:34 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-17 19:54:32 -07:00
|
|
|
pub fn compare_simd_types<'a, 'tcx>(
|
2016-12-31 16:00:24 -07:00
|
|
|
bcx: &Builder<'a, 'tcx>,
|
2016-12-17 19:54:32 -07:00
|
|
|
lhs: ValueRef,
|
|
|
|
rhs: ValueRef,
|
|
|
|
t: Ty<'tcx>,
|
|
|
|
ret_ty: Type,
|
|
|
|
op: hir::BinOp_
|
|
|
|
) -> ValueRef {
|
2015-01-29 14:03:34 +02:00
|
|
|
let signed = match t.sty {
|
2015-06-11 16:21:46 -07:00
|
|
|
ty::TyFloat(_) => {
|
2016-03-29 01:46:02 +02:00
|
|
|
let cmp = bin_op_to_fcmp_predicate(op);
|
2016-12-11 08:59:20 -07:00
|
|
|
return bcx.sext(bcx.fcmp(cmp, lhs, rhs), ret_ty);
|
2015-01-28 20:20:55 +11:00
|
|
|
},
|
2015-06-11 16:21:46 -07:00
|
|
|
ty::TyUint(_) => false,
|
|
|
|
ty::TyInt(_) => true,
|
2016-03-29 01:46:02 +02:00
|
|
|
_ => bug!("compare_simd_types: invalid SIMD type"),
|
2015-01-28 20:20:55 +11:00
|
|
|
};
|
2015-01-29 14:03:34 +02:00
|
|
|
|
2016-03-29 01:46:02 +02:00
|
|
|
let cmp = bin_op_to_icmp_predicate(op, signed);
|
2015-01-28 20:20:55 +11:00
|
|
|
// LLVM outputs an `< size x i1 >`, so we need to perform a sign extension
|
|
|
|
// to get the correctly sized type. This will compile to a single instruction
|
|
|
|
// once the IR is converted to assembly if the SIMD instruction is supported
|
|
|
|
// by the target architecture.
|
2016-12-11 08:59:20 -07:00
|
|
|
bcx.sext(bcx.icmp(cmp, lhs, rhs), ret_ty)
|
2014-05-02 11:04:46 -07:00
|
|
|
}
|
|
|
|
|
2015-11-11 22:02:51 +02:00
|
|
|
/// Retrieve the information we are losing (making dynamic) in an unsizing
|
|
|
|
/// adjustment.
|
|
|
|
///
|
|
|
|
/// The `old_info` argument is a bit funny. It is intended for use
|
2017-08-11 00:16:18 +02:00
|
|
|
/// in an upcast, where the new vtable for an object will be derived
|
2015-11-11 22:02:51 +02:00
|
|
|
/// from the old one.
|
|
|
|
pub fn unsized_info<'ccx, 'tcx>(ccx: &CrateContext<'ccx, 'tcx>,
|
|
|
|
source: Ty<'tcx>,
|
|
|
|
target: Ty<'tcx>,
|
2016-03-06 17:32:47 +02:00
|
|
|
old_info: Option<ValueRef>)
|
2015-11-11 22:02:51 +02:00
|
|
|
-> ValueRef {
|
|
|
|
let (source, target) = ccx.tcx().struct_lockstep_tails(source, target);
|
|
|
|
match (&source.sty, &target.sty) {
|
2017-08-05 16:11:24 +03:00
|
|
|
(&ty::TyArray(_, len), &ty::TySlice(_)) => {
|
|
|
|
C_usize(ccx, len.val.to_const_int().unwrap().to_u64().unwrap())
|
|
|
|
}
|
2016-11-16 09:21:49 -07:00
|
|
|
(&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
|
2015-11-11 22:02:51 +02:00
|
|
|
// For now, upcasts are limited to changes in marker
|
|
|
|
// traits, and hence never actually require an actual
|
|
|
|
// change to the vtable.
|
|
|
|
old_info.expect("unsized_info: missing old info for trait upcast")
|
|
|
|
}
|
2016-11-16 09:21:49 -07:00
|
|
|
(_, &ty::TyDynamic(ref data, ..)) => {
|
|
|
|
consts::ptrcast(meth::get_vtable(ccx, source, data.principal()),
|
2015-11-11 22:02:51 +02:00
|
|
|
Type::vtable_ptr(ccx))
|
|
|
|
}
|
2016-03-29 01:46:02 +02:00
|
|
|
_ => bug!("unsized_info: invalid unsizing {:?} -> {:?}",
|
2015-11-11 22:02:51 +02:00
|
|
|
source,
|
2016-03-29 01:46:02 +02:00
|
|
|
target),
|
2015-11-11 22:02:51 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Coerce `src` to `dst_ty`. `src_ty` must be a thin pointer.
|
2016-12-17 19:54:32 -07:00
|
|
|
pub fn unsize_thin_ptr<'a, 'tcx>(
|
2016-12-31 16:00:24 -07:00
|
|
|
bcx: &Builder<'a, 'tcx>,
|
2016-12-17 19:54:32 -07:00
|
|
|
src: ValueRef,
|
|
|
|
src_ty: Ty<'tcx>,
|
|
|
|
dst_ty: Ty<'tcx>
|
|
|
|
) -> (ValueRef, ValueRef) {
|
2015-11-11 22:02:51 +02:00
|
|
|
debug!("unsize_thin_ptr: {:?} => {:?}", src_ty, dst_ty);
|
|
|
|
match (&src_ty.sty, &dst_ty.sty) {
|
|
|
|
(&ty::TyRef(_, ty::TypeAndMut { ty: a, .. }),
|
|
|
|
&ty::TyRef(_, ty::TypeAndMut { ty: b, .. })) |
|
|
|
|
(&ty::TyRef(_, ty::TypeAndMut { ty: a, .. }),
|
|
|
|
&ty::TyRawPtr(ty::TypeAndMut { ty: b, .. })) |
|
|
|
|
(&ty::TyRawPtr(ty::TypeAndMut { ty: a, .. }),
|
|
|
|
&ty::TyRawPtr(ty::TypeAndMut { ty: b, .. })) => {
|
2016-12-19 16:25:00 -07:00
|
|
|
assert!(bcx.ccx.shared().type_is_sized(a));
|
|
|
|
let ptr_ty = type_of::in_memory_type_of(bcx.ccx, b).ptr_to();
|
|
|
|
(bcx.pointercast(src, ptr_ty), unsized_info(bcx.ccx, a, b, None))
|
2015-11-11 22:02:51 +02:00
|
|
|
}
|
2017-01-21 17:40:31 +03:00
|
|
|
(&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
|
|
|
|
let (a, b) = (src_ty.boxed_ty(), dst_ty.boxed_ty());
|
|
|
|
assert!(bcx.ccx.shared().type_is_sized(a));
|
|
|
|
let ptr_ty = type_of::in_memory_type_of(bcx.ccx, b).ptr_to();
|
|
|
|
(bcx.pointercast(src, ptr_ty), unsized_info(bcx.ccx, a, b, None))
|
|
|
|
}
|
2016-03-29 01:46:02 +02:00
|
|
|
_ => bug!("unsize_thin_ptr: called on bad types"),
|
2015-11-11 22:02:51 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Coerce `src`, which is a reference to a value of type `src_ty`,
|
|
|
|
/// to a value of type `dst_ty` and store the result in `dst`
|
2016-12-31 16:00:24 -07:00
|
|
|
pub fn coerce_unsized_into<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
|
2017-02-06 17:27:09 +01:00
|
|
|
src: &LvalueRef<'tcx>,
|
|
|
|
dst: &LvalueRef<'tcx>) {
|
|
|
|
let src_ty = src.ty.to_ty(bcx.tcx());
|
|
|
|
let dst_ty = dst.ty.to_ty(bcx.tcx());
|
2017-01-21 17:40:31 +03:00
|
|
|
let coerce_ptr = || {
|
|
|
|
let (base, info) = if common::type_is_fat_ptr(bcx.ccx, src_ty) {
|
|
|
|
// fat-ptr to fat-ptr unsize preserves the vtable
|
|
|
|
// i.e. &'a fmt::Debug+Send => &'a fmt::Debug
|
|
|
|
// So we need to pointercast the base to ensure
|
|
|
|
// the types match up.
|
2017-02-06 17:27:09 +01:00
|
|
|
let (base, info) = load_fat_ptr(bcx, src.llval, src.alignment, src_ty);
|
2017-01-21 17:40:31 +03:00
|
|
|
let llcast_ty = type_of::fat_ptr_base_ty(bcx.ccx, dst_ty);
|
|
|
|
let base = bcx.pointercast(base, llcast_ty);
|
|
|
|
(base, info)
|
|
|
|
} else {
|
2017-02-06 17:27:09 +01:00
|
|
|
let base = load_ty(bcx, src.llval, src.alignment, src_ty);
|
2017-01-21 17:40:31 +03:00
|
|
|
unsize_thin_ptr(bcx, base, src_ty, dst_ty)
|
|
|
|
};
|
2017-02-06 17:27:09 +01:00
|
|
|
store_fat_ptr(bcx, base, info, dst.llval, dst.alignment, dst_ty);
|
2017-01-21 17:40:31 +03:00
|
|
|
};
|
2015-11-11 22:02:51 +02:00
|
|
|
match (&src_ty.sty, &dst_ty.sty) {
|
|
|
|
(&ty::TyRef(..), &ty::TyRef(..)) |
|
|
|
|
(&ty::TyRef(..), &ty::TyRawPtr(..)) |
|
|
|
|
(&ty::TyRawPtr(..), &ty::TyRawPtr(..)) => {
|
2017-01-21 17:40:31 +03:00
|
|
|
coerce_ptr()
|
|
|
|
}
|
|
|
|
(&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
|
|
|
|
coerce_ptr()
|
2015-11-11 22:02:51 +02:00
|
|
|
}
|
|
|
|
|
2016-08-28 20:44:19 -04:00
|
|
|
(&ty::TyAdt(def_a, substs_a), &ty::TyAdt(def_b, substs_b)) => {
|
2015-11-11 22:02:51 +02:00
|
|
|
assert_eq!(def_a, def_b);
|
|
|
|
|
2016-08-28 20:44:19 -04:00
|
|
|
let src_fields = def_a.variants[0].fields.iter().map(|f| {
|
2017-01-01 08:46:34 -07:00
|
|
|
monomorphize::field_ty(bcx.tcx(), substs_a, f)
|
2016-08-28 20:44:19 -04:00
|
|
|
});
|
|
|
|
let dst_fields = def_b.variants[0].fields.iter().map(|f| {
|
2017-01-01 08:46:34 -07:00
|
|
|
monomorphize::field_ty(bcx.tcx(), substs_b, f)
|
2016-08-28 20:44:19 -04:00
|
|
|
});
|
2015-11-11 22:02:51 +02:00
|
|
|
|
2016-08-28 20:44:19 -04:00
|
|
|
let iter = src_fields.zip(dst_fields).enumerate();
|
2015-11-11 22:02:51 +02:00
|
|
|
for (i, (src_fty, dst_fty)) in iter {
|
2016-12-19 16:25:00 -07:00
|
|
|
if type_is_zero_size(bcx.ccx, dst_fty) {
|
2015-11-19 12:36:31 +01:00
|
|
|
continue;
|
|
|
|
}
|
2015-11-11 22:02:51 +02:00
|
|
|
|
2017-02-06 17:27:09 +01:00
|
|
|
let (src_f, src_f_align) = src.trans_field_ptr(bcx, i);
|
|
|
|
let (dst_f, dst_f_align) = dst.trans_field_ptr(bcx, i);
|
2015-11-11 22:02:51 +02:00
|
|
|
if src_fty == dst_fty {
|
2016-12-29 02:20:26 +01:00
|
|
|
memcpy_ty(bcx, dst_f, src_f, src_fty, None);
|
2015-11-11 22:02:51 +02:00
|
|
|
} else {
|
2017-02-06 17:27:09 +01:00
|
|
|
coerce_unsized_into(
|
|
|
|
bcx,
|
|
|
|
&LvalueRef::new_sized_ty(src_f, src_fty, src_f_align),
|
|
|
|
&LvalueRef::new_sized_ty(dst_f, dst_fty, dst_f_align)
|
|
|
|
);
|
2015-11-11 22:02:51 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-03-29 01:46:02 +02:00
|
|
|
_ => bug!("coerce_unsized_into: invalid coercion {:?} -> {:?}",
|
|
|
|
src_ty,
|
|
|
|
dst_ty),
|
2015-11-11 22:02:51 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-11 08:59:20 -07:00
|
|
|
pub fn cast_shift_expr_rhs(
|
2016-12-31 16:00:24 -07:00
|
|
|
cx: &Builder, op: hir::BinOp_, lhs: ValueRef, rhs: ValueRef
|
2016-12-11 08:59:20 -07:00
|
|
|
) -> ValueRef {
|
|
|
|
cast_shift_rhs(op, lhs, rhs, |a, b| cx.trunc(a, b), |a, b| cx.zext(a, b))
|
2012-02-21 21:01:33 -08:00
|
|
|
}
|
|
|
|
|
2015-11-19 12:36:31 +01:00
|
|
|
pub fn cast_shift_const_rhs(op: hir::BinOp_, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
|
|
|
|
cast_shift_rhs(op,
|
|
|
|
lhs,
|
|
|
|
rhs,
|
2013-06-16 22:52:44 +12:00
|
|
|
|a, b| unsafe { llvm::LLVMConstTrunc(a, b.to_ref()) },
|
|
|
|
|a, b| unsafe { llvm::LLVMConstZExt(a, b.to_ref()) })
|
2012-02-21 21:01:33 -08:00
|
|
|
}
|
|
|
|
|
2015-07-31 00:04:06 -07:00
|
|
|
fn cast_shift_rhs<F, G>(op: hir::BinOp_,
|
2015-03-19 19:52:08 +01:00
|
|
|
lhs: ValueRef,
|
|
|
|
rhs: ValueRef,
|
|
|
|
trunc: F,
|
|
|
|
zext: G)
|
2015-11-19 12:36:31 +01:00
|
|
|
-> ValueRef
|
|
|
|
where F: FnOnce(ValueRef, Type) -> ValueRef,
|
|
|
|
G: FnOnce(ValueRef, Type) -> ValueRef
|
2014-12-09 13:44:51 -05:00
|
|
|
{
|
2012-02-21 21:01:33 -08:00
|
|
|
// Shifts may have any size int on the rhs
|
2016-03-29 09:32:58 +03:00
|
|
|
if op.is_shift() {
|
2015-01-15 01:08:22 +11:00
|
|
|
let mut rhs_llty = val_ty(rhs);
|
|
|
|
let mut lhs_llty = val_ty(lhs);
|
2015-11-19 12:36:31 +01:00
|
|
|
if rhs_llty.kind() == Vector {
|
|
|
|
rhs_llty = rhs_llty.element_type()
|
|
|
|
}
|
|
|
|
if lhs_llty.kind() == Vector {
|
|
|
|
lhs_llty = lhs_llty.element_type()
|
|
|
|
}
|
2015-01-15 01:08:22 +11:00
|
|
|
let rhs_sz = rhs_llty.int_width();
|
|
|
|
let lhs_sz = lhs_llty.int_width();
|
|
|
|
if lhs_sz < rhs_sz {
|
|
|
|
trunc(rhs, lhs_llty)
|
|
|
|
} else if lhs_sz > rhs_sz {
|
|
|
|
// FIXME (#1877: If shifting by negative
|
|
|
|
// values becomes not undefined then this is wrong.
|
|
|
|
zext(rhs, lhs_llty)
|
2012-02-21 21:01:33 -08:00
|
|
|
} else {
|
|
|
|
rhs
|
|
|
|
}
|
2015-01-15 01:08:22 +11:00
|
|
|
} else {
|
|
|
|
rhs
|
2012-02-21 21:01:33 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-08-11 11:48:43 -07:00
|
|
|
/// Returns whether this session's target will use SEH-based unwinding.
|
|
|
|
///
|
|
|
|
/// This is only true for MSVC targets, and even then the 64-bit MSVC target
|
|
|
|
/// currently uses SEH-ish unwinding with DWARF info tables to the side (same as
|
|
|
|
/// 64-bit MinGW) instead of "full SEH".
|
|
|
|
pub fn wants_msvc_seh(sess: &Session) -> bool {
|
2015-10-23 18:18:44 -07:00
|
|
|
sess.target.target.options.is_like_msvc
|
2015-08-11 11:48:43 -07:00
|
|
|
}
|
|
|
|
|
2016-10-04 19:24:49 +03:00
|
|
|
/// Emits a call to the `llvm.assume` intrinsic, telling the optimizer that
/// the i1 `val` is true at this point.
pub fn call_assume<'a, 'tcx>(b: &Builder<'a, 'tcx>, val: ValueRef) {
    let assume_intrinsic = b.ccx.get_intrinsic("llvm.assume");
    b.call(assume_intrinsic, &[val], None);
}
|
|
|
|
|
2014-11-25 21:17:11 -05:00
|
|
|
/// Helper for loading values from memory. Does the necessary conversion if the in-memory type
|
|
|
|
/// differs from the type used for SSA values. Also handles various special cases where the type
|
|
|
|
/// gives us better information about what we are loading.
|
2017-02-06 17:27:09 +01:00
|
|
|
pub fn load_ty<'a, 'tcx>(b: &Builder<'a, 'tcx>, ptr: ValueRef,
|
|
|
|
alignment: Alignment, t: Ty<'tcx>) -> ValueRef {
|
2016-03-09 14:20:22 +02:00
|
|
|
let ccx = b.ccx;
|
|
|
|
if type_is_zero_size(ccx, t) {
|
|
|
|
return C_undef(type_of::type_of(ccx, t));
|
|
|
|
}
|
2015-03-21 00:21:38 +01:00
|
|
|
|
|
|
|
unsafe {
|
|
|
|
let global = llvm::LLVMIsAGlobalVariable(ptr);
|
|
|
|
if !global.is_null() && llvm::LLVMIsGlobalConstant(global) == llvm::True {
|
|
|
|
let val = llvm::LLVMGetInitializer(global);
|
|
|
|
if !val.is_null() {
|
2016-03-09 14:20:22 +02:00
|
|
|
if t.is_bool() {
|
|
|
|
return llvm::LLVMConstTrunc(val, Type::i1(ccx).to_ref());
|
|
|
|
}
|
|
|
|
return val;
|
2015-01-29 14:03:34 +02:00
|
|
|
}
|
|
|
|
}
|
2014-07-05 21:47:14 +02:00
|
|
|
}
|
2015-03-21 00:21:38 +01:00
|
|
|
|
2016-03-09 14:20:22 +02:00
|
|
|
if t.is_bool() {
|
2017-02-06 17:27:09 +01:00
|
|
|
b.trunc(b.load_range_assert(ptr, 0, 2, llvm::False, alignment.to_align()),
|
|
|
|
Type::i1(ccx))
|
2015-06-24 08:24:13 +03:00
|
|
|
} else if t.is_char() {
|
2015-03-21 00:21:38 +01:00
|
|
|
// a char is a Unicode codepoint, and so takes values from 0
|
|
|
|
// to 0x10FFFF inclusive only.
|
2017-02-06 17:27:09 +01:00
|
|
|
b.load_range_assert(ptr, 0, 0x10FFFF + 1, llvm::False, alignment.to_align())
|
2017-03-14 01:08:21 +02:00
|
|
|
} else if (t.is_region_ptr() || t.is_box() || t.is_fn())
|
|
|
|
&& !common::type_is_fat_ptr(ccx, t)
|
|
|
|
{
|
2017-02-06 17:27:09 +01:00
|
|
|
b.load_nonnull(ptr, alignment.to_align())
|
2015-03-21 00:21:38 +01:00
|
|
|
} else {
|
2017-02-06 17:27:09 +01:00
|
|
|
b.load(ptr, alignment.to_align())
|
2016-03-09 14:20:22 +02:00
|
|
|
}
|
2014-07-05 21:47:14 +02:00
|
|
|
}
|
|
|
|
|
2014-11-25 21:17:11 -05:00
|
|
|
/// Helper for storing values in memory. Does the necessary conversion if the in-memory type
|
|
|
|
/// differs from the type used for SSA values.
|
2017-02-06 17:27:09 +01:00
|
|
|
pub fn store_ty<'a, 'tcx>(cx: &Builder<'a, 'tcx>, v: ValueRef, dst: ValueRef,
|
|
|
|
dst_align: Alignment, t: Ty<'tcx>) {
|
2016-02-18 19:49:45 +02:00
|
|
|
debug!("store_ty: {:?} : {:?} <- {:?}", Value(dst), t, Value(v));
|
2015-11-09 02:16:19 +02:00
|
|
|
|
2016-12-19 17:48:41 -07:00
|
|
|
if common::type_is_fat_ptr(cx.ccx, t) {
|
2016-12-11 08:59:20 -07:00
|
|
|
let lladdr = cx.extract_value(v, abi::FAT_PTR_ADDR);
|
|
|
|
let llextra = cx.extract_value(v, abi::FAT_PTR_EXTRA);
|
2017-02-06 17:27:09 +01:00
|
|
|
store_fat_ptr(cx, lladdr, llextra, dst, dst_align, t);
|
2015-06-26 16:40:51 +02:00
|
|
|
} else {
|
2017-02-06 17:27:09 +01:00
|
|
|
cx.store(from_immediate(cx, v), dst, dst_align.to_align());
|
2015-04-15 20:14:54 +02:00
|
|
|
}
|
2015-03-21 00:21:38 +01:00
|
|
|
}
|
|
|
|
|
2016-12-31 16:00:24 -07:00
|
|
|
pub fn store_fat_ptr<'a, 'tcx>(cx: &Builder<'a, 'tcx>,
|
2016-12-17 19:54:32 -07:00
|
|
|
data: ValueRef,
|
|
|
|
extra: ValueRef,
|
|
|
|
dst: ValueRef,
|
2017-02-06 17:27:09 +01:00
|
|
|
dst_align: Alignment,
|
2016-12-17 19:54:32 -07:00
|
|
|
_ty: Ty<'tcx>) {
|
2015-11-10 22:05:11 +02:00
|
|
|
// FIXME: emit metadata
|
2017-02-06 17:27:09 +01:00
|
|
|
cx.store(data, get_dataptr(cx, dst), dst_align.to_align());
|
|
|
|
cx.store(extra, get_meta(cx, dst), dst_align.to_align());
|
2015-11-10 22:05:11 +02:00
|
|
|
}
|
|
|
|
|
2016-12-11 15:03:52 -07:00
|
|
|
pub fn load_fat_ptr<'a, 'tcx>(
|
2017-02-06 17:27:09 +01:00
|
|
|
b: &Builder<'a, 'tcx>, src: ValueRef, alignment: Alignment, t: Ty<'tcx>
|
2016-12-17 19:54:32 -07:00
|
|
|
) -> (ValueRef, ValueRef) {
|
2016-12-11 15:03:52 -07:00
|
|
|
let ptr = get_dataptr(b, src);
|
2017-01-21 17:40:31 +03:00
|
|
|
let ptr = if t.is_region_ptr() || t.is_box() {
|
2017-02-06 17:27:09 +01:00
|
|
|
b.load_nonnull(ptr, alignment.to_align())
|
2016-10-04 17:44:31 +03:00
|
|
|
} else {
|
2017-02-06 17:27:09 +01:00
|
|
|
b.load(ptr, alignment.to_align())
|
2016-10-04 17:44:31 +03:00
|
|
|
};
|
|
|
|
|
2017-02-21 21:08:06 +13:00
|
|
|
let meta = get_meta(b, src);
|
|
|
|
let meta_ty = val_ty(meta);
|
|
|
|
// If the 'meta' field is a pointer, it's a vtable, so use load_nonnull
|
|
|
|
// instead
|
|
|
|
let meta = if meta_ty.element_type().kind() == llvm::TypeKind::Pointer {
|
|
|
|
b.load_nonnull(meta, None)
|
|
|
|
} else {
|
|
|
|
b.load(meta, None)
|
|
|
|
};
|
2016-10-04 17:44:31 +03:00
|
|
|
|
|
|
|
(ptr, meta)
|
2015-11-10 22:05:11 +02:00
|
|
|
}
|
|
|
|
|
2016-12-31 16:00:24 -07:00
|
|
|
pub fn from_immediate(bcx: &Builder, val: ValueRef) -> ValueRef {
|
2016-12-19 16:25:00 -07:00
|
|
|
if val_ty(val) == Type::i1(bcx.ccx) {
|
|
|
|
bcx.zext(val, Type::i8(bcx.ccx))
|
2015-03-21 00:21:38 +01:00
|
|
|
} else {
|
|
|
|
val
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-12-31 16:00:24 -07:00
|
|
|
pub fn to_immediate(bcx: &Builder, val: ValueRef, ty: Ty) -> ValueRef {
|
2015-06-24 08:24:13 +03:00
|
|
|
if ty.is_bool() {
|
2016-12-19 16:25:00 -07:00
|
|
|
bcx.trunc(val, Type::i1(bcx.ccx))
|
2015-03-21 00:21:38 +01:00
|
|
|
} else {
|
|
|
|
val
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-06-08 00:35:01 +03:00
|
|
|
pub enum Lifetime { Start, End }
|
2016-01-12 17:17:50 +01:00
|
|
|
|
2016-06-08 00:35:01 +03:00
|
|
|
impl Lifetime {
|
2016-12-11 15:03:52 -07:00
|
|
|
// If LLVM lifetime intrinsic support is enabled (i.e. optimizations
|
|
|
|
// on), and `ptr` is nonzero-sized, then extracts the size of `ptr`
|
|
|
|
// and the intrinsic for `lt` and passes them to `emit`, which is in
|
|
|
|
// charge of generating code to call the passed intrinsic on whatever
|
2017-08-11 20:34:14 +02:00
|
|
|
// block of generated code is targeted for the intrinsic.
|
2016-12-11 15:03:52 -07:00
|
|
|
//
|
|
|
|
// If LLVM lifetime intrinsic support is disabled (i.e. optimizations
|
|
|
|
// off) or `ptr` is zero-sized, then no-op (does not call `emit`).
|
2016-06-08 00:35:01 +03:00
|
|
|
pub fn call(self, b: &Builder, ptr: ValueRef) {
|
2016-12-11 15:03:52 -07:00
|
|
|
if b.ccx.sess().opts.optimize == config::OptLevel::No {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
let size = machine::llsize_of_alloc(b.ccx, val_ty(ptr).element_type());
|
|
|
|
if size == 0 {
|
|
|
|
return;
|
|
|
|
}
|
2016-06-08 00:35:01 +03:00
|
|
|
|
2016-12-11 15:03:52 -07:00
|
|
|
let lifetime_intrinsic = b.ccx.get_intrinsic(match self {
|
|
|
|
Lifetime::Start => "llvm.lifetime.start",
|
|
|
|
Lifetime::End => "llvm.lifetime.end"
|
|
|
|
});
|
|
|
|
|
|
|
|
let ptr = b.pointercast(ptr, Type::i8p(b.ccx));
|
|
|
|
b.call(lifetime_intrinsic, &[C_u64(b.ccx, size), ptr], None);
|
|
|
|
}
|
Emit LLVM lifetime intrinsics to improve stack usage and codegen in general
Lifetime intrinsics help to reduce stack usage, because LLVM can apply
stack coloring to reuse the stack slots of dead allocas for new ones.
For example these functions now both use the same amount of stack, while
previous `bar()` used five times as much as `foo()`:
````rust
fn foo() {
println("{}", 5);
}
fn bar() {
println("{}", 5);
println("{}", 5);
println("{}", 5);
println("{}", 5);
println("{}", 5);
}
````
On top of that, LLVM can also optimize out certain operations when it
knows that memory is dead after a certain point. For example, it can
sometimes remove the zeroing used to cancel the drop glue. This is
possible when the glue drop itself was already removed because the
zeroing dominated the drop glue call. For example in:
````rust
pub fn bar(x: (Box<int>, int)) -> (Box<int>, int) {
x
}
````
With optimizations, this currently results in:
````llvm
define void @_ZN3bar20h330fa42547df8179niaE({ i64*, i64 }* noalias nocapture nonnull sret, { i64*, i64 }* noalias nocapture nonnull) unnamed_addr #0 {
"_ZN29_$LP$Box$LT$int$GT$$C$int$RP$39glue_drop.$x22glue_drop$x22$LP$1347$RP$17h88cf42702e5a322aE.exit":
%2 = bitcast { i64*, i64 }* %1 to i8*
%3 = bitcast { i64*, i64 }* %0 to i8*
tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %3, i8* %2, i64 16, i32 8, i1 false)
tail call void @llvm.memset.p0i8.i64(i8* %2, i8 0, i64 16, i32 8, i1 false)
ret void
}
````
But with lifetime intrinsics we get:
````llvm
define void @_ZN3bar20h330fa42547df8179niaE({ i64*, i64 }* noalias nocapture nonnull sret, { i64*, i64 }* noalias nocapture nonnull) unnamed_addr #0 {
"_ZN29_$LP$Box$LT$int$GT$$C$int$RP$39glue_drop.$x22glue_drop$x22$LP$1347$RP$17h88cf42702e5a322aE.exit":
%2 = bitcast { i64*, i64 }* %1 to i8*
%3 = bitcast { i64*, i64 }* %0 to i8*
tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %3, i8* %2, i64 16, i32 8, i1 false)
tail call void @llvm.lifetime.end(i64 16, i8* %2)
ret void
}
````
Fixes #15665
2014-05-01 19:32:07 +02:00
|
|
|
}
|
|
|
|
|
2016-12-17 19:54:32 -07:00
|
|
|
pub fn call_memcpy<'a, 'tcx>(b: &Builder<'a, 'tcx>,
|
2016-03-08 14:29:46 +02:00
|
|
|
dst: ValueRef,
|
|
|
|
src: ValueRef,
|
|
|
|
n_bytes: ValueRef,
|
|
|
|
align: u32) {
|
|
|
|
let ccx = b.ccx;
|
2017-03-24 09:31:26 +01:00
|
|
|
let ptr_width = &ccx.sess().target.target.target_pointer_width;
|
2015-08-15 18:43:39 +12:00
|
|
|
let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width);
|
2014-04-09 19:56:31 -04:00
|
|
|
let memcpy = ccx.get_intrinsic(&key);
|
2016-03-08 14:29:46 +02:00
|
|
|
let src_ptr = b.pointercast(src, Type::i8p(ccx));
|
|
|
|
let dst_ptr = b.pointercast(dst, Type::i8p(ccx));
|
2017-08-05 12:27:28 +03:00
|
|
|
let size = b.intcast(n_bytes, ccx.isize_ty(), false);
|
2014-03-15 22:29:34 +02:00
|
|
|
let align = C_i32(ccx, align as i32);
|
2014-07-05 21:43:47 +02:00
|
|
|
let volatile = C_bool(ccx, false);
|
2016-03-08 14:29:46 +02:00
|
|
|
b.call(memcpy, &[dst_ptr, src_ptr, size, align, volatile], None);
|
2012-08-28 15:54:45 -07:00
|
|
|
}
|
|
|
|
|
2016-12-31 16:00:24 -07:00
|
|
|
pub fn memcpy_ty<'a, 'tcx>(
|
|
|
|
bcx: &Builder<'a, 'tcx>,
|
|
|
|
dst: ValueRef,
|
|
|
|
src: ValueRef,
|
|
|
|
t: Ty<'tcx>,
|
|
|
|
align: Option<u32>,
|
|
|
|
) {
|
2016-12-19 16:25:00 -07:00
|
|
|
let ccx = bcx.ccx;
|
2015-08-22 17:07:37 +02:00
|
|
|
|
2017-03-02 05:35:25 +02:00
|
|
|
let size = ccx.size_of(t);
|
|
|
|
if size == 0 {
|
2015-08-22 17:07:37 +02:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2017-03-02 05:35:25 +02:00
|
|
|
let align = align.unwrap_or_else(|| ccx.align_of(t));
|
2017-08-05 12:27:28 +03:00
|
|
|
call_memcpy(bcx, dst, src, C_usize(ccx, size), align);
|
2012-08-28 15:54:45 -07:00
|
|
|
}
|
|
|
|
|
2016-12-17 19:54:32 -07:00
|
|
|
pub fn call_memset<'a, 'tcx>(b: &Builder<'a, 'tcx>,
|
2017-02-08 18:31:03 +01:00
|
|
|
ptr: ValueRef,
|
|
|
|
fill_byte: ValueRef,
|
|
|
|
size: ValueRef,
|
|
|
|
align: ValueRef,
|
|
|
|
volatile: bool) -> ValueRef {
|
2017-03-24 09:31:26 +01:00
|
|
|
let ptr_width = &b.ccx.sess().target.target.target_pointer_width;
|
2016-02-04 19:40:28 +02:00
|
|
|
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
|
2016-12-17 15:52:29 -07:00
|
|
|
let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key);
|
|
|
|
let volatile = C_bool(b.ccx, volatile);
|
|
|
|
b.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None)
|
2016-02-04 19:40:28 +02:00
|
|
|
}
|
|
|
|
|
2016-11-09 23:09:28 +02:00
|
|
|
/// Translate a single monomorphized `instance` to LLVM IR.
///
/// The LLVM declaration for the instance must already exist in
/// `ccx.instances()` (it is a bug otherwise); this fills in its body by
/// translating the instance's MIR.
pub fn trans_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, instance: Instance<'tcx>) {
    // With `-Z trans-stats`, record per-function stats; the recorder is kept
    // alive for the duration of this function via `_s`.
    let _s = if ccx.sess().trans_stats() {
        let mut instance_name = String::new();
        DefPathBasedNames::new(ccx.tcx(), true, true)
            .push_def_path(instance.def_id(), &mut instance_name);
        Some(StatRecorder::new(ccx, instance_name))
    } else {
        None
    };

    // this is an info! to allow collecting monomorphization statistics
    // and to allow finding the last function before LLVM aborts from
    // release builds.
    info!("trans_instance({})", instance);

    // Compute the (normalized, region-erased) signature of the function.
    let fn_ty = common::instance_ty(ccx.tcx(), &instance);
    let sig = common::ty_fn_sig(ccx, fn_ty);
    let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);

    // The declaration must have been created earlier; we only fill in the body.
    let lldecl = match ccx.instances().borrow().get(&instance) {
        Some(&val) => val,
        None => bug!("Instance `{:?}` not already declared", instance)
    };

    ccx.stats().borrow_mut().n_closures += 1;

    // The `uwtable` attribute according to LLVM is:
    //
    // This attribute indicates that the ABI being targeted requires that an
    // unwind table entry be produced for this function even if we can show
    // that no exceptions passes by it. This is normally the case for the
    // ELF x86-64 abi, but it can be disabled for some compilation units.
    //
    // Typically when we're compiling with `-C panic=abort` (which implies this
    // `no_landing_pads` check) we don't need `uwtable` because we can't
    // generate any exceptions! On Windows, however, exceptions include other
    // events such as illegal instructions, segfaults, etc. This means that on
    // Windows we end up still needing the `uwtable` attribute even if the `-C
    // panic=abort` flag is passed.
    //
    // You can also find more info on why Windows is whitelisted here in:
    // https://bugzilla.mozilla.org/show_bug.cgi?id=1302078
    if !ccx.sess().no_landing_pads() ||
       ccx.sess().target.target.options.is_like_windows {
        attributes::emit_uwtable(lldecl, true);
    }

    // Translate the instance's MIR into the declared LLVM function.
    let mir = ccx.tcx().instance_mir(instance.def);
    mir::trans_mir(ccx, lldecl, &mir, instance, sig);
}
|
|
|
|
|
2017-09-12 11:04:46 -07:00
|
|
|
pub fn linkage_by_name(name: &str) -> Option<Linkage> {
|
|
|
|
use rustc::middle::trans::Linkage::*;
|
|
|
|
|
2014-11-11 20:22:41 -05:00
|
|
|
// Use the names from src/llvm/docs/LangRef.rst here. Most types are only
|
|
|
|
// applicable to variable declarations and may not really make sense for
|
|
|
|
// Rust code in the first place but whitelist them anyway and trust that
|
|
|
|
// the user knows what s/he's doing. Who knows, unanticipated use cases
|
|
|
|
// may pop up in the future.
|
|
|
|
//
|
|
|
|
// ghost, dllimport, dllexport and linkonce_odr_autohide are not supported
|
|
|
|
// and don't have to be, LLVM treats them as no-ops.
|
|
|
|
match name {
|
2017-09-12 11:04:46 -07:00
|
|
|
"appending" => Some(Appending),
|
|
|
|
"available_externally" => Some(AvailableExternally),
|
|
|
|
"common" => Some(Common),
|
|
|
|
"extern_weak" => Some(ExternalWeak),
|
|
|
|
"external" => Some(External),
|
|
|
|
"internal" => Some(Internal),
|
|
|
|
"linkonce" => Some(LinkOnceAny),
|
|
|
|
"linkonce_odr" => Some(LinkOnceODR),
|
|
|
|
"private" => Some(Private),
|
|
|
|
"weak" => Some(WeakAny),
|
|
|
|
"weak_odr" => Some(WeakODR),
|
2014-11-11 20:22:41 -05:00
|
|
|
_ => None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-05-06 20:02:09 -04:00
|
|
|
pub fn set_link_section(ccx: &CrateContext,
|
|
|
|
llval: ValueRef,
|
|
|
|
attrs: &[ast::Attribute]) {
|
|
|
|
if let Some(sect) = attr::first_attr_value_str_by_name(attrs, "link_section") {
|
2016-11-16 10:52:37 +00:00
|
|
|
if contains_null(§.as_str()) {
|
2016-06-13 22:43:30 -07:00
|
|
|
ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`", §));
|
|
|
|
}
|
|
|
|
unsafe {
|
2016-11-16 10:52:37 +00:00
|
|
|
let buf = CString::new(sect.as_str().as_bytes()).unwrap();
|
2016-06-13 22:43:30 -07:00
|
|
|
llvm::LLVMSetSection(llval, buf.as_ptr());
|
|
|
|
}
|
2015-08-03 15:38:06 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-31 18:51:39 +02:00
|
|
|
// check for the #[rustc_error] annotation, which forces an
|
|
|
|
// error in trans. This is used to write compile-fail tests
|
|
|
|
// that actually test that compilation succeeds without
|
|
|
|
// reporting an error.
|
|
|
|
fn check_for_rustc_errors_attr(tcx: TyCtxt) {
|
|
|
|
if let Some((id, span)) = *tcx.sess.entry_fn.borrow() {
|
|
|
|
let main_def_id = tcx.hir.local_def_id(id);
|
|
|
|
|
|
|
|
if tcx.has_attr(main_def_id, "rustc_error") {
|
|
|
|
tcx.sess.span_fatal(span, "compilation successful");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-15 21:45:21 +02:00
|
|
|
/// Create the `main` function which will initialize the rust runtime and call
/// users main function.
///
/// Does nothing if there is no entry function, if this codegen unit does not
/// contain the Rust `main` instance, or if the entry type is `EntryNone`.
fn maybe_create_entry_wrapper(ccx: &CrateContext) {
    let (main_def_id, span) = match *ccx.sess().entry_fn.borrow() {
        Some((id, span)) => {
            (ccx.tcx().hir.local_def_id(id), span)
        }
        None => return,
    };

    let instance = Instance::mono(ccx.tcx(), main_def_id);

    if !ccx.codegen_unit().contains_item(&TransItem::Fn(instance)) {
        // We want to create the wrapper in the same codegen unit as Rust's main
        // function.
        return;
    }

    let main_llfn = callee::get_fn(ccx, instance);

    // Dispatch on the configured entry style.
    let et = ccx.sess().entry_type.get().unwrap();
    match et {
        config::EntryMain => create_entry_fn(ccx, span, main_llfn, true),
        config::EntryStart => create_entry_fn(ccx, span, main_llfn, false),
        config::EntryNone => {}    // Do nothing.
    }

    /// Declare and fill in the C-ABI `main(argc, argv)` symbol. When
    /// `use_start_lang_item` is true, it calls the `start` lang item with the
    /// Rust main function as an argument; otherwise it calls the user-defined
    /// `#[start]` function directly.
    fn create_entry_fn(ccx: &CrateContext,
                       sp: Span,
                       rust_main: ValueRef,
                       use_start_lang_item: bool) {
        // C `main` signature: isize main(isize argc, i8** argv)
        let llfty = Type::func(&[ccx.isize_ty(), Type::i8p(ccx).ptr_to()], &ccx.isize_ty());

        // A pre-existing `main` symbol (e.g. from `#[no_mangle] fn main`)
        // would clash with the wrapper we are about to declare.
        if declare::get_defined_value(ccx, "main").is_some() {
            // FIXME: We should be smart and show a better diagnostic here.
            ccx.sess().struct_span_err(sp, "entry symbol `main` defined multiple times")
                      .help("did you use #[no_mangle] on `fn main`? Use #[start] instead")
                      .emit();
            ccx.sess().abort_if_errors();
            bug!();
        }
        let llfn = declare::declare_cfn(ccx, "main", llfty);

        // `main` should respect same config for frame pointer elimination as rest of code
        attributes::set_frame_pointer_elimination(ccx, llfn);

        let bld = Builder::new_block(ccx, llfn, "top");

        debuginfo::gdb::insert_reference_to_gdb_debug_scripts_section_global(ccx, &bld);

        // Choose the callee and its arguments depending on whether we go
        // through the `start` lang item or straight to the user's function.
        let (start_fn, args) = if use_start_lang_item {
            let start_def_id = ccx.tcx().require_lang_item(StartFnLangItem);
            let start_instance = Instance::mono(ccx.tcx(), start_def_id);
            let start_fn = callee::get_fn(ccx, start_instance);
            (start_fn, vec![bld.pointercast(rust_main, Type::i8p(ccx).ptr_to()), get_param(llfn, 0),
                            get_param(llfn, 1)])
        } else {
            debug!("using user-defined start fn");
            (rust_main, vec![get_param(llfn, 0 as c_uint), get_param(llfn, 1 as c_uint)])
        };

        // Return the callee's result as the process exit code.
        let result = bld.call(start_fn, &args, None);
        bld.ret(result);
    }
}
|
|
|
|
|
2014-09-23 00:14:46 -07:00
|
|
|
/// Returns `true` when `s` contains a NUL (`0x00`) byte, which would make it
/// unrepresentable as a C string.
fn contains_null(s: &str) -> bool {
    s.as_bytes().iter().any(|&b| b == 0)
}
|
|
|
|
|
2017-04-13 18:21:51 -04:00
|
|
|
/// Encode crate metadata and, when required by the crate types being built,
/// embed it (deflate-compressed) as a global in a dedicated LLVM module.
///
/// Returns the metadata LLVM context/module (always created, possibly left
/// empty) together with the encoded metadata and its incremental hashes.
fn write_metadata<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>,
                            link_meta: &LinkMeta,
                            exported_symbols: &NodeSet)
                            -> (ContextRef, ModuleRef,
                                EncodedMetadata, EncodedMetadataHashes) {
    use std::io::Write;
    use flate2::Compression;
    use flate2::write::DeflateEncoder;

    let (metadata_llcx, metadata_llmod) = unsafe {
        context::create_context_and_module(tcx.sess, "metadata")
    };

    // Ordered so that `max()` below picks the strongest requirement among
    // all crate types being produced.
    #[derive(PartialEq, Eq, PartialOrd, Ord)]
    enum MetadataKind {
        None,
        Uncompressed,
        Compressed
    }

    let kind = tcx.sess.crate_types.borrow().iter().map(|ty| {
        match *ty {
            config::CrateTypeExecutable |
            config::CrateTypeStaticlib |
            config::CrateTypeCdylib => MetadataKind::None,

            config::CrateTypeRlib => MetadataKind::Uncompressed,

            config::CrateTypeDylib |
            config::CrateTypeProcMacro => MetadataKind::Compressed,
        }
    }).max().unwrap();

    // No crate type needs metadata: return empty metadata and an empty module.
    if kind == MetadataKind::None {
        return (metadata_llcx,
                metadata_llmod,
                EncodedMetadata::new(),
                EncodedMetadataHashes::new());
    }

    let (metadata, hashes) = tcx.encode_metadata(link_meta, exported_symbols);
    // Rlibs carry metadata as a separate archive member, so nothing needs to
    // be embedded into the LLVM module.
    if kind == MetadataKind::Uncompressed {
        return (metadata_llcx, metadata_llmod, metadata, hashes);
    }

    assert!(kind == MetadataKind::Compressed);
    // The compressed blob is prefixed with the metadata encoding version.
    let mut compressed = tcx.metadata_encoding_version();
    DeflateEncoder::new(&mut compressed, Compression::Fast)
        .write_all(&metadata.raw_data).unwrap();

    // Embed the compressed metadata as a byte-array global in the metadata
    // module, placed in a dedicated object-file section.
    let llmeta = C_bytes_in_context(metadata_llcx, &compressed);
    let llconst = C_struct_in_context(metadata_llcx, &[llmeta], false);
    let name = symbol_export::metadata_symbol_name(tcx);
    let buf = CString::new(name).unwrap();
    let llglobal = unsafe {
        llvm::LLVMAddGlobal(metadata_llmod, val_ty(llconst).to_ref(), buf.as_ptr())
    };
    unsafe {
        llvm::LLVMSetInitializer(llglobal, llconst);
        let section_name = metadata::metadata_section_name(&tcx.sess.target.target);
        let name = CString::new(section_name).unwrap();
        llvm::LLVMSetSection(llglobal, name.as_ptr());

        // Also generate a .section directive to force no
        // flags, at least for ELF outputs, so that the
        // metadata doesn't get loaded into memory.
        let directive = format!(".section {}", section_name);
        let directive = CString::new(directive).unwrap();
        llvm::LLVMSetModuleInlineAsm(metadata_llmod, directive.as_ptr())
    }
    return (metadata_llcx, metadata_llmod, metadata, hashes);
}
|
|
|
|
|
2015-08-21 00:41:07 -07:00
|
|
|
// Create a `__imp_<symbol> = &symbol` global for every public static `symbol`.
// This is required to satisfy `dllimport` references to static data in .rlibs
// when using MSVC linker. We do this only for data, as linker can fix up
// code references on its own.
// See #26591, #27438
fn create_imps(sess: &Session,
               llvm_module: &ModuleLlvm) {
    // The x86 ABI seems to require that leading underscores are added to symbol
    // names, so we need an extra underscore on 32-bit. There's also a leading
    // '\x01' here which disables LLVM's symbol mangling (e.g. no extra
    // underscores added in front).
    let prefix = if sess.target.target.target_pointer_width == "32" {
        "\x01__imp__"
    } else {
        "\x01__imp_"
    };
    unsafe {
        // Collect externally-linked *definitions* (declarations are skipped)
        // first, so we don't mutate the global list while iterating it.
        let exported: Vec<_> = iter_globals(llvm_module.llmod)
                                   .filter(|&val| {
                                       llvm::LLVMRustGetLinkage(val) ==
                                       llvm::Linkage::ExternalLinkage &&
                                       llvm::LLVMIsDeclaration(val) == 0
                                   })
                                   .collect();

        let i8p_ty = Type::i8p_llcx(llvm_module.llcx);
        for val in exported {
            // Build the `__imp_`-prefixed name for this symbol and add a new
            // global holding a pointer to the original one.
            let name = CStr::from_ptr(llvm::LLVMGetValueName(val));
            let mut imp_name = prefix.as_bytes().to_vec();
            imp_name.extend(name.to_bytes());
            let imp_name = CString::new(imp_name).unwrap();
            let imp = llvm::LLVMAddGlobal(llvm_module.llmod,
                                          i8p_ty.to_ref(),
                                          imp_name.as_ptr() as *const _);
            llvm::LLVMSetInitializer(imp, consts::ptrcast(val, i8p_ty));
            llvm::LLVMRustSetLinkage(imp, llvm::Linkage::ExternalLinkage);
        }
    }
}
|
2014-08-01 10:29:44 -07:00
|
|
|
|
2015-08-21 00:41:07 -07:00
|
|
|
/// Iterator over a linked chain of LLVM values (e.g. a module's globals),
/// advanced by an LLVM C "get next" function and terminated by a null value.
struct ValueIter {
    // The value to yield next; null once the chain is exhausted.
    cur: ValueRef,
    // Advances to the following value, e.g. `llvm::LLVMGetNextGlobal`.
    step: unsafe extern "C" fn(ValueRef) -> ValueRef,
}
|
2014-08-01 10:29:44 -07:00
|
|
|
|
2015-08-21 00:41:07 -07:00
|
|
|
impl Iterator for ValueIter {
|
|
|
|
type Item = ValueRef;
|
2014-08-01 10:29:44 -07:00
|
|
|
|
2015-08-21 00:41:07 -07:00
|
|
|
fn next(&mut self) -> Option<ValueRef> {
|
|
|
|
let old = self.cur;
|
|
|
|
if !old.is_null() {
|
2015-10-17 20:15:26 -04:00
|
|
|
self.cur = unsafe { (self.step)(old) };
|
2015-08-21 00:41:07 -07:00
|
|
|
Some(old)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
2014-08-01 10:29:44 -07:00
|
|
|
}
|
2015-08-21 00:41:07 -07:00
|
|
|
}
|
2014-08-01 10:29:44 -07:00
|
|
|
|
2015-08-21 00:41:07 -07:00
|
|
|
fn iter_globals(llmod: llvm::ModuleRef) -> ValueIter {
|
|
|
|
unsafe {
|
|
|
|
ValueIter {
|
|
|
|
cur: llvm::LLVMGetFirstGlobal(llmod),
|
|
|
|
step: llvm::LLVMGetNextGlobal,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2015-01-01 23:54:03 -05:00
|
|
|
|
2016-05-03 05:23:22 +03:00
|
|
|
/// Top-level driver of crate translation: writes metadata, optionally emits an
/// allocator shim, translates each codegen unit (asynchronously via `write`),
/// and saves the dependency graph.
///
/// Returns an `OngoingCrateTranslation` handle; `rx` is the channel over which
/// the translation backend receives work-related messages.
pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             rx: mpsc::Receiver<Box<Any + Send>>)
                             -> OngoingCrateTranslation {
    use rustc_trans_utils::find_exported_symbols;

    check_for_rustc_errors_attr(tcx);

    // Derive the crate hash from the dep-graph fingerprint of the whole crate.
    let crate_hash = tcx.dep_graph
                        .fingerprint_of(&DepNode::new_no_params(DepKind::Krate))
                        .unwrap();
    let link_meta = link::build_link_meta(crate_hash);
    let exported_symbol_node_ids = find_exported_symbols(tcx);

    let shared_ccx = SharedCrateContext::new(tcx);
    // Translate the metadata.
    let (metadata_llcx, metadata_llmod, metadata, metadata_incr_hashes) =
        time(tcx.sess.time_passes(), "write metadata", || {
            write_metadata(tcx, &link_meta, &exported_symbol_node_ids)
        });

    let metadata_module = ModuleTranslation {
        name: link::METADATA_MODULE_NAME.to_string(),
        symbol_name_hash: 0, // we always rebuild metadata, at least for now
        source: ModuleSource::Translated(ModuleLlvm {
            llcx: metadata_llcx,
            llmod: metadata_llmod,
        }),
        kind: ModuleKind::Metadata,
    };

    // Optional timing graph for `-Z trans-time-graph`.
    let time_graph = if tcx.sess.opts.debugging_opts.trans_time_graph {
        Some(time_graph::TimeGraph::new())
    } else {
        None
    };

    // Skip crate items and just output metadata in -Z no-trans mode.
    if tcx.sess.opts.debugging_opts.no_trans ||
       !tcx.sess.opts.output_types.should_trans() {
        let ongoing_translation = write::start_async_translation(
            tcx,
            time_graph.clone(),
            link_meta,
            metadata,
            rx);

        ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);
        ongoing_translation.translation_finished(tcx);

        assert_and_save_dep_graph(tcx,
                                  metadata_incr_hashes,
                                  link_meta);

        ongoing_translation.check_for_errors(tcx.sess);

        return ongoing_translation;
    }

    // Run the translation item collector and partition the collected items into
    // codegen units.
    let codegen_units =
        shared_ccx.tcx().collect_and_partition_translation_items(LOCAL_CRATE).1;
    let codegen_units = (*codegen_units).clone();

    assert!(codegen_units.len() <= 1 || !tcx.sess.lto());

    let ongoing_translation = write::start_async_translation(
        tcx,
        time_graph.clone(),
        link_meta,
        metadata,
        rx);

    // Translate an allocator shim, if any
    //
    // If LTO is enabled and we've got some previous LLVM module we translated
    // above, then we can just translate directly into that LLVM module. If not,
    // however, we need to create a separate module and trans into that. Note
    // that the separate translation is critical for the standard library where
    // the rlib's object file doesn't have allocator functions but the dylib
    // links in an object file that has allocator functions. When we're
    // compiling a final LTO artifact, though, there's no need to worry about
    // this as we're not working with this dual "rlib/dylib" functionality.
    let allocator_module = if tcx.sess.lto() {
        None
    } else if let Some(kind) = tcx.sess.allocator_kind.get() {
        unsafe {
            let (llcx, llmod) =
                context::create_context_and_module(tcx.sess, "allocator");
            let modules = ModuleLlvm {
                llmod,
                llcx,
            };
            time(tcx.sess.time_passes(), "write allocator module", || {
                allocator::trans(tcx, &modules, kind)
            });

            Some(ModuleTranslation {
                name: link::ALLOCATOR_MODULE_NAME.to_string(),
                symbol_name_hash: 0, // we always rebuild allocator shims
                source: ModuleSource::Translated(modules),
                kind: ModuleKind::Allocator,
            })
        }
    } else {
        None
    };

    if let Some(allocator_module) = allocator_module {
        ongoing_translation.submit_pre_translated_module_to_llvm(tcx, allocator_module);
    }

    ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);

    // We sort the codegen units by size. This way we can schedule work for LLVM
    // a bit more efficiently. Note that "size" is defined rather crudely at the
    // moment as it is just the number of TransItems in the CGU, not taking into
    // account the size of each TransItem.
    let codegen_units = {
        let mut codegen_units = codegen_units;
        codegen_units.sort_by_key(|cgu| -(cgu.items().len() as isize));
        codegen_units
    };

    let mut total_trans_time = Duration::new(0, 0);
    let mut all_stats = Stats::default();

    for cgu in codegen_units.into_iter() {
        // Throttle against the LLVM worker pool and surface any errors early.
        ongoing_translation.wait_for_signal_to_translate_item();
        ongoing_translation.check_for_errors(tcx.sess);

        let _timing_guard = time_graph
            .as_ref()
            .map(|time_graph| time_graph.start(write::TRANS_WORKER_TIMELINE,
                                               write::TRANS_WORK_PACKAGE_KIND));
        let start_time = Instant::now();
        all_stats.extend(tcx.compile_codegen_unit(*cgu.name()));
        total_trans_time += start_time.elapsed();
        ongoing_translation.check_for_errors(tcx.sess);
    }

    ongoing_translation.translation_finished(tcx);

    // Since the main thread is sometimes blocked during trans, we keep track
    // -Ztime-passes output manually.
    print_time_passes_entry(tcx.sess.time_passes(),
                            "translate to LLVM IR",
                            total_trans_time);

    // In incremental mode, verify that each module's reuse disposition matched
    // the test expectations recorded in DISPOSITIONS.
    if tcx.sess.opts.incremental.is_some() {
        DISPOSITIONS.with(|d| {
            assert_module_sources::assert_module_sources(tcx, &d.borrow());
        });
    }

    symbol_names_test::report_symbol_names(tcx);

    if shared_ccx.sess().trans_stats() {
        println!("--- trans stats ---");
        println!("n_glues_created: {}", all_stats.n_glues_created);
        println!("n_null_glues: {}", all_stats.n_null_glues);
        println!("n_real_glues: {}", all_stats.n_real_glues);

        println!("n_fns: {}", all_stats.n_fns);
        println!("n_inlines: {}", all_stats.n_inlines);
        println!("n_closures: {}", all_stats.n_closures);
        println!("fn stats:");
        all_stats.fn_stats.sort_by_key(|&(_, insns)| insns);
        for &(ref name, insns) in all_stats.fn_stats.iter() {
            println!("{} insns, {}", insns, *name);
        }
    }

    if shared_ccx.sess().count_llvm_insns() {
        for (k, v) in all_stats.llvm_insns.iter() {
            println!("{:7} {}", *v, *k);
        }
    }

    ongoing_translation.check_for_errors(tcx.sess);

    assert_and_save_dep_graph(tcx,
                              metadata_incr_hashes,
                              link_meta);
    ongoing_translation
}
|
2015-11-18 05:38:50 -05:00
|
|
|
|
2017-09-13 20:26:39 -07:00
|
|
|
// FIXME(#42293) hopefully once red/green is enabled we're testing everything
// via a method that doesn't require this!
//
// Thread-local record of (module name, disposition) pairs, consumed in
// `trans_crate` by `assert_module_sources` when incremental compilation is
// enabled. (Presumably populated while codegen units are compiled — the
// writer is outside this chunk.)
thread_local!(static DISPOSITIONS: RefCell<Vec<(String, Disposition)>> = Default::default());
|
|
|
|
|
2017-07-26 15:02:53 +02:00
|
|
|
fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
metadata_incr_hashes: EncodedMetadataHashes,
|
|
|
|
link_meta: LinkMeta) {
|
|
|
|
time(tcx.sess.time_passes(),
|
|
|
|
"assert dep graph",
|
|
|
|
|| rustc_incremental::assert_dep_graph(tcx));
|
|
|
|
|
|
|
|
time(tcx.sess.time_passes(),
|
|
|
|
"serialize dep graph",
|
|
|
|
|| rustc_incremental::save_dep_graph(tcx,
|
|
|
|
&metadata_incr_hashes,
|
|
|
|
link_meta.crate_hash));
|
|
|
|
}
|
|
|
|
|
2017-04-24 20:27:59 +03:00
|
|
|
/// Verify that no two translation items map to the same symbol name; emits a
/// fatal error on the first duplicate found.
#[inline(never)] // give this a place in the profiler
fn assert_symbols_are_distinct<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trans_items: I)
    where I: Iterator<Item=&'a TransItem<'tcx>>
{
    // Pair each item with its symbol name so duplicates can be reported
    // against the offending items.
    let mut symbols: Vec<_> = trans_items.map(|trans_item| {
        (trans_item, trans_item.symbol_name(tcx))
    }).collect();

    // Sort by symbol name so duplicates become adjacent.
    (&mut symbols[..]).sort_by(|&(_, ref sym1), &(_, ref sym2)|{
        sym1.cmp(sym2)
    });

    // Scan adjacent pairs for equal names.
    for pair in (&symbols[..]).windows(2) {
        let sym1 = &pair[0].1;
        let sym2 = &pair[1].1;

        if *sym1 == *sym2 {
            let trans_item1 = pair[0].0;
            let trans_item2 = pair[1].0;

            let span1 = trans_item1.local_span(tcx);
            let span2 = trans_item2.local_span(tcx);

            // Deterministically select one of the spans for error reporting
            let span = match (span1, span2) {
                (Some(span1), Some(span2)) => {
                    Some(if span1.lo().0 > span2.lo().0 {
                        span1
                    } else {
                        span2
                    })
                }
                (Some(span), None) |
                (None, Some(span)) => Some(span),
                _ => None
            };

            let error_message = format!("symbol `{}` is already defined", sym1);

            // Prefer a spanned diagnostic when a local span is available.
            if let Some(span) = span {
                tcx.sess.span_fatal(span, &error_message)
            } else {
                tcx.sess.fatal(&error_message)
            }
        }
    }
}
|
|
|
|
|
2017-09-12 11:04:46 -07:00
|
|
|
fn collect_and_partition_translation_items<'a, 'tcx>(
|
|
|
|
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
|
|
|
cnum: CrateNum,
|
2017-09-13 15:24:13 -07:00
|
|
|
) -> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>)
|
2017-09-12 11:04:46 -07:00
|
|
|
{
|
|
|
|
assert_eq!(cnum, LOCAL_CRATE);
|
2017-09-12 09:32:37 -07:00
|
|
|
let time_passes = tcx.sess.time_passes();
|
2015-11-02 14:46:39 +01:00
|
|
|
|
2017-09-12 09:32:37 -07:00
|
|
|
let collection_mode = match tcx.sess.opts.debugging_opts.print_trans_items {
|
2015-11-02 14:46:39 +01:00
|
|
|
Some(ref s) => {
|
|
|
|
let mode_string = s.to_lowercase();
|
|
|
|
let mode_string = mode_string.trim();
|
|
|
|
if mode_string == "eager" {
|
|
|
|
TransItemCollectionMode::Eager
|
|
|
|
} else {
|
|
|
|
if mode_string != "lazy" {
|
|
|
|
let message = format!("Unknown codegen-item collection mode '{}'. \
|
|
|
|
Falling back to 'lazy' mode.",
|
|
|
|
mode_string);
|
2017-09-12 09:32:37 -07:00
|
|
|
tcx.sess.warn(&message);
|
2015-11-02 14:46:39 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
TransItemCollectionMode::Lazy
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => TransItemCollectionMode::Lazy
|
|
|
|
};
|
|
|
|
|
2016-05-26 08:59:58 -04:00
|
|
|
let (items, inlining_map) =
|
|
|
|
time(time_passes, "translation item collection", || {
|
2017-09-13 13:22:20 -07:00
|
|
|
collector::collect_crate_translation_items(tcx, collection_mode)
|
2015-11-02 14:46:39 +01:00
|
|
|
});
|
|
|
|
|
2017-09-12 09:32:37 -07:00
|
|
|
assert_symbols_are_distinct(tcx, items.iter());
|
2016-05-26 08:59:58 -04:00
|
|
|
|
2017-09-12 09:32:37 -07:00
|
|
|
let strategy = if tcx.sess.opts.debugging_opts.incremental.is_some() {
|
2016-04-21 16:45:33 -04:00
|
|
|
PartitioningStrategy::PerModule
|
|
|
|
} else {
|
2017-09-12 09:32:37 -07:00
|
|
|
PartitioningStrategy::FixedUnitCount(tcx.sess.opts.cg.codegen_units)
|
2016-04-21 16:45:33 -04:00
|
|
|
};
|
|
|
|
|
2016-03-24 11:40:49 -04:00
|
|
|
let codegen_units = time(time_passes, "codegen unit partitioning", || {
|
2017-09-12 09:32:37 -07:00
|
|
|
partitioning::partition(tcx,
|
2016-04-21 16:45:33 -04:00
|
|
|
items.iter().cloned(),
|
|
|
|
strategy,
|
2017-09-13 13:22:20 -07:00
|
|
|
&inlining_map)
|
2017-09-12 11:04:46 -07:00
|
|
|
.into_iter()
|
|
|
|
.map(Arc::new)
|
|
|
|
.collect::<Vec<_>>()
|
2016-03-24 11:40:49 -04:00
|
|
|
});
|
|
|
|
|
2017-09-12 09:32:37 -07:00
|
|
|
assert!(tcx.sess.opts.cg.codegen_units == codegen_units.len() ||
|
|
|
|
tcx.sess.opts.debugging_opts.incremental.is_some());
|
2016-05-26 12:18:39 -04:00
|
|
|
|
2017-09-13 15:24:13 -07:00
|
|
|
let translation_items: DefIdSet = items.iter().filter_map(|trans_item| {
|
|
|
|
match *trans_item {
|
|
|
|
TransItem::Fn(ref instance) => Some(instance.def_id()),
|
|
|
|
_ => None,
|
|
|
|
}
|
|
|
|
}).collect();
|
2016-06-07 21:14:51 -04:00
|
|
|
|
2017-09-12 09:32:37 -07:00
|
|
|
if tcx.sess.opts.debugging_opts.print_trans_items.is_some() {
|
2016-11-08 14:02:55 +11:00
|
|
|
let mut item_to_cgus = FxHashMap();
|
2016-03-24 11:40:49 -04:00
|
|
|
|
2016-05-06 14:27:34 -04:00
|
|
|
for cgu in &codegen_units {
|
2016-07-21 12:49:59 -04:00
|
|
|
for (&trans_item, &linkage) in cgu.items() {
|
2016-03-24 11:40:49 -04:00
|
|
|
item_to_cgus.entry(trans_item)
|
|
|
|
.or_insert(Vec::new())
|
2016-07-21 12:49:59 -04:00
|
|
|
.push((cgu.name().clone(), linkage));
|
2016-03-24 11:40:49 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut item_keys: Vec<_> = items
|
|
|
|
.iter()
|
|
|
|
.map(|i| {
|
2017-09-12 09:32:37 -07:00
|
|
|
let mut output = i.to_string(tcx);
|
2016-03-24 11:40:49 -04:00
|
|
|
output.push_str(" @@");
|
|
|
|
let mut empty = Vec::new();
|
2017-08-01 13:03:03 +01:00
|
|
|
let cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty);
|
2016-03-24 11:40:49 -04:00
|
|
|
cgus.as_mut_slice().sort_by_key(|&(ref name, _)| name.clone());
|
|
|
|
cgus.dedup();
|
2017-07-12 17:37:58 +02:00
|
|
|
for &(ref cgu_name, (linkage, _)) in cgus.iter() {
|
2016-03-24 11:40:49 -04:00
|
|
|
output.push_str(" ");
|
2017-03-24 09:31:26 +01:00
|
|
|
output.push_str(&cgu_name);
|
2016-03-24 11:40:49 -04:00
|
|
|
|
|
|
|
let linkage_abbrev = match linkage {
|
2017-09-12 11:04:46 -07:00
|
|
|
Linkage::External => "External",
|
|
|
|
Linkage::AvailableExternally => "Available",
|
|
|
|
Linkage::LinkOnceAny => "OnceAny",
|
|
|
|
Linkage::LinkOnceODR => "OnceODR",
|
|
|
|
Linkage::WeakAny => "WeakAny",
|
|
|
|
Linkage::WeakODR => "WeakODR",
|
|
|
|
Linkage::Appending => "Appending",
|
|
|
|
Linkage::Internal => "Internal",
|
|
|
|
Linkage::Private => "Private",
|
|
|
|
Linkage::ExternalWeak => "ExternalWeak",
|
|
|
|
Linkage::Common => "Common",
|
2016-03-24 11:40:49 -04:00
|
|
|
};
|
|
|
|
|
|
|
|
output.push_str("[");
|
|
|
|
output.push_str(linkage_abbrev);
|
|
|
|
output.push_str("]");
|
|
|
|
}
|
|
|
|
output
|
|
|
|
})
|
|
|
|
.collect();
|
|
|
|
|
2015-11-02 14:46:39 +01:00
|
|
|
item_keys.sort();
|
|
|
|
|
|
|
|
for item in item_keys {
|
|
|
|
println!("TRANS_ITEM {}", item);
|
|
|
|
}
|
|
|
|
}
|
2016-05-06 14:27:34 -04:00
|
|
|
|
2017-09-13 15:24:13 -07:00
|
|
|
(Arc::new(translation_items), Arc::new(codegen_units))
|
2016-05-26 08:59:58 -04:00
|
|
|
}
|
2017-08-28 15:55:32 -07:00
|
|
|
|
|
|
|
impl CrateInfo {
|
2017-09-13 13:22:20 -07:00
|
|
|
pub fn new(tcx: TyCtxt) -> CrateInfo {
|
2017-08-28 15:55:32 -07:00
|
|
|
let mut info = CrateInfo {
|
|
|
|
panic_runtime: None,
|
|
|
|
compiler_builtins: None,
|
|
|
|
profiler_runtime: None,
|
|
|
|
sanitizer_runtime: None,
|
|
|
|
is_no_builtins: FxHashSet(),
|
2017-08-28 17:06:03 -07:00
|
|
|
native_libraries: FxHashMap(),
|
2017-08-30 14:48:57 -07:00
|
|
|
used_libraries: tcx.native_libraries(LOCAL_CRATE),
|
|
|
|
link_args: tcx.link_args(LOCAL_CRATE),
|
2017-08-31 08:07:39 -07:00
|
|
|
crate_name: FxHashMap(),
|
2017-08-31 12:08:29 -07:00
|
|
|
used_crates_dynamic: cstore::used_crates(tcx, LinkagePreference::RequireDynamic),
|
|
|
|
used_crates_static: cstore::used_crates(tcx, LinkagePreference::RequireStatic),
|
|
|
|
used_crate_source: FxHashMap(),
|
2017-08-28 15:55:32 -07:00
|
|
|
};
|
|
|
|
|
2017-09-07 08:13:41 -07:00
|
|
|
for &cnum in tcx.crates().iter() {
|
2017-08-28 17:06:03 -07:00
|
|
|
info.native_libraries.insert(cnum, tcx.native_libraries(cnum));
|
2017-08-31 08:07:39 -07:00
|
|
|
info.crate_name.insert(cnum, tcx.crate_name(cnum).to_string());
|
2017-08-31 12:08:29 -07:00
|
|
|
info.used_crate_source.insert(cnum, tcx.used_crate_source(cnum));
|
2017-08-28 15:55:32 -07:00
|
|
|
if tcx.is_panic_runtime(cnum) {
|
|
|
|
info.panic_runtime = Some(cnum);
|
|
|
|
}
|
|
|
|
if tcx.is_compiler_builtins(cnum) {
|
|
|
|
info.compiler_builtins = Some(cnum);
|
|
|
|
}
|
|
|
|
if tcx.is_profiler_runtime(cnum) {
|
|
|
|
info.profiler_runtime = Some(cnum);
|
|
|
|
}
|
|
|
|
if tcx.is_sanitizer_runtime(cnum) {
|
|
|
|
info.sanitizer_runtime = Some(cnum);
|
|
|
|
}
|
|
|
|
if tcx.is_no_builtins(cnum) {
|
|
|
|
info.is_no_builtins.insert(cnum);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-30 14:48:57 -07:00
|
|
|
|
2017-08-28 15:55:32 -07:00
|
|
|
return info
|
|
|
|
}
|
|
|
|
}
|
2017-09-12 11:04:46 -07:00
|
|
|
|
2017-09-13 15:24:13 -07:00
|
|
|
fn is_translated_function(tcx: TyCtxt, id: DefId) -> bool {
|
|
|
|
// FIXME(#42293) needs red/green tracking to avoid failing a bunch of
|
|
|
|
// existing tests
|
|
|
|
tcx.dep_graph.with_ignore(|| {
|
|
|
|
let (all_trans_items, _) =
|
|
|
|
tcx.collect_and_partition_translation_items(LOCAL_CRATE);
|
|
|
|
all_trans_items.contains(&id)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2017-09-13 20:26:39 -07:00
|
|
|
// Translates one codegen unit to LLVM IR (or reuses a cached object from a
// previous incremental session), records its estimated LLVM cost, and hands
// the resulting module off to the LLVM back-end queue. Returns the
// translation statistics for the unit.
fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  cgu: InternedString) -> Stats {
    // FIXME(#42293) needs red/green tracking to avoid failing a bunch of
    // existing tests
    let cgu = tcx.dep_graph.with_ignore(|| {
        tcx.codegen_unit(cgu)
    });

    let start_time = Instant::now();
    // Run the actual translation as a dep-graph task keyed on this CGU's
    // work-product node, so incremental compilation can track it.
    let dep_node = cgu.work_product_dep_node();
    let ((stats, module), _) =
        tcx.dep_graph.with_task(dep_node,
                                tcx,
                                cgu,
                                module_translation);
    let time_to_translate = start_time.elapsed();

    // Record whether this module was reused or re-translated — presumably
    // consumed by the assert-module-sources incremental self-tests
    // (see the `assert_module_sources` import); TODO confirm with the
    // reader of DISPOSITIONS.
    if tcx.sess.opts.incremental.is_some() {
        DISPOSITIONS.with(|d| {
            d.borrow_mut().push(module.disposition());
        });
    }

    // We assume that the cost to run LLVM on a CGU is proportional to
    // the time we needed for translating it.
    let cost = time_to_translate.as_secs() * 1_000_000_000 +
               time_to_translate.subsec_nanos() as u64;

    write::submit_translated_module_to_llvm(tcx,
                                            module,
                                            cost);
    return stats;

    // The dep-graph task body: translates `cgu` into a ModuleTranslation,
    // either by reusing a previous work product or by generating LLVM IR.
    fn module_translation<'a, 'tcx>(
        tcx: TyCtxt<'a, 'tcx, 'tcx>,
        cgu: Arc<CodegenUnit<'tcx>>)
        -> (Stats, ModuleTranslation)
    {
        let cgu_name = cgu.name().to_string();
        let cgu_id = cgu.work_product_id();
        let symbol_name_hash = cgu.compute_symbol_name_hash(tcx);

        // Check whether there is a previous work-product we can
        // re-use. Not only must the file exist, and the inputs not
        // be dirty, but the hash of the symbols we will generate must
        // be the same.
        let previous_work_product =
            tcx.dep_graph.previous_work_product(&cgu_id).and_then(|work_product| {
                if work_product.input_hash == symbol_name_hash {
                    debug!("trans_reuse_previous_work_products: reusing {:?}", work_product);
                    Some(work_product)
                } else {
                    if tcx.sess.opts.debugging_opts.incremental_info {
                        eprintln!("incremental: CGU `{}` invalidated because of \
                                   changed partitioning hash.",
                                  cgu.name());
                    }
                    debug!("trans_reuse_previous_work_products: \
                            not reusing {:?} because hash changed to {:?}",
                           work_product, symbol_name_hash);
                    None
                }
            });

        if let Some(buf) = previous_work_product {
            // Don't need to translate this module.
            let module = ModuleTranslation {
                name: cgu_name,
                symbol_name_hash,
                source: ModuleSource::Preexisting(buf.clone()),
                kind: ModuleKind::Regular,
            };
            return (Stats::default(), module);
        }

        // Instantiate translation items without filling out definitions yet...
        let scx = SharedCrateContext::new(tcx);
        let lcx = LocalCrateContext::new(&scx, cgu);
        let module = {
            let ccx = CrateContext::new(&scx, &lcx);
            let trans_items = ccx.codegen_unit()
                                 .items_in_deterministic_order(ccx.tcx());
            for &(trans_item, (linkage, visibility)) in &trans_items {
                trans_item.predefine(&ccx, linkage, visibility);
            }

            // ... and now that we have everything pre-defined, fill out those definitions.
            for &(trans_item, _) in &trans_items {
                trans_item.define(&ccx);
            }

            // If this codegen unit contains the main function, also create the
            // wrapper here
            maybe_create_entry_wrapper(&ccx);

            // Run replace-all-uses-with for statics that need it
            // (the old global is deleted after all uses point at the
            // bitcast of the new one).
            for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
                unsafe {
                    let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
                    llvm::LLVMReplaceAllUsesWith(old_g, bitcast);
                    llvm::LLVMDeleteGlobal(old_g);
                }
            }

            // Create the llvm.used variable
            // This variable has type [N x i8*] and is stored in the llvm.metadata section
            if !ccx.used_statics().borrow().is_empty() {
                let name = CString::new("llvm.used").unwrap();
                let section = CString::new("llvm.metadata").unwrap();
                let array = C_array(Type::i8(&ccx).ptr_to(), &*ccx.used_statics().borrow());

                unsafe {
                    let g = llvm::LLVMAddGlobal(ccx.llmod(),
                                                val_ty(array).to_ref(),
                                                name.as_ptr());
                    llvm::LLVMSetInitializer(g, array);
                    // Appending linkage is what LLVM expects for llvm.used.
                    llvm::LLVMRustSetLinkage(g, llvm::Linkage::AppendingLinkage);
                    llvm::LLVMSetSection(g, section.as_ptr());
                }
            }

            // Finalize debuginfo
            if ccx.sess().opts.debuginfo != NoDebugInfo {
                debuginfo::finalize(&ccx);
            }

            let llvm_module = ModuleLlvm {
                llcx: ccx.llcx(),
                llmod: ccx.llmod(),
            };

            // In LTO mode we inject the allocator shim into the existing
            // module.
            if ccx.sess().lto() {
                if let Some(kind) = ccx.sess().allocator_kind.get() {
                    time(ccx.sess().time_passes(), "write allocator module", || {
                        unsafe {
                            allocator::trans(ccx.tcx(), &llvm_module, kind);
                        }
                    });
                }
            }

            // Adjust exported symbols for MSVC dllimport
            if ccx.sess().target.target.options.is_like_msvc &&
               ccx.sess().crate_types.borrow().iter().any(|ct| *ct == config::CrateTypeRlib) {
                create_imps(ccx.sess(), &llvm_module);
            }

            ModuleTranslation {
                name: cgu_name,
                symbol_name_hash,
                source: ModuleSource::Translated(llvm_module),
                kind: ModuleKind::Regular,
            }
        };

        (lcx.into_stats(), module)
    }
}
|
|
|
|
|
2017-09-13 15:24:13 -07:00
|
|
|
pub fn provide_local(providers: &mut Providers) {
|
2017-09-12 11:04:46 -07:00
|
|
|
providers.collect_and_partition_translation_items =
|
|
|
|
collect_and_partition_translation_items;
|
2017-09-13 15:24:13 -07:00
|
|
|
|
|
|
|
providers.is_translated_function = is_translated_function;
|
2017-09-13 20:26:39 -07:00
|
|
|
|
|
|
|
providers.codegen_unit = |tcx, name| {
|
|
|
|
let (_, all) = tcx.collect_and_partition_translation_items(LOCAL_CRATE);
|
|
|
|
all.iter()
|
|
|
|
.find(|cgu| *cgu.name() == name)
|
|
|
|
.cloned()
|
|
|
|
.expect(&format!("failed to find cgu with name {:?}", name))
|
|
|
|
};
|
|
|
|
providers.compile_codegen_unit = compile_codegen_unit;
|
2017-09-13 15:24:13 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Installs the trans providers that also apply to upstream (extern) crates.
pub fn provide_extern(providers: &mut Providers) {
    providers.is_translated_function = is_translated_function;
}
|
|
|
|
|
|
|
|
pub fn linkage_to_llvm(linkage: Linkage) -> llvm::Linkage {
|
|
|
|
match linkage {
|
|
|
|
Linkage::External => llvm::Linkage::ExternalLinkage,
|
|
|
|
Linkage::AvailableExternally => llvm::Linkage::AvailableExternallyLinkage,
|
|
|
|
Linkage::LinkOnceAny => llvm::Linkage::LinkOnceAnyLinkage,
|
|
|
|
Linkage::LinkOnceODR => llvm::Linkage::LinkOnceODRLinkage,
|
|
|
|
Linkage::WeakAny => llvm::Linkage::WeakAnyLinkage,
|
|
|
|
Linkage::WeakODR => llvm::Linkage::WeakODRLinkage,
|
|
|
|
Linkage::Appending => llvm::Linkage::AppendingLinkage,
|
|
|
|
Linkage::Internal => llvm::Linkage::InternalLinkage,
|
|
|
|
Linkage::Private => llvm::Linkage::PrivateLinkage,
|
|
|
|
Linkage::ExternalWeak => llvm::Linkage::ExternalWeakLinkage,
|
|
|
|
Linkage::Common => llvm::Linkage::CommonLinkage,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn visibility_to_llvm(linkage: Visibility) -> llvm::Visibility {
|
|
|
|
match linkage {
|
|
|
|
Visibility::Default => llvm::Visibility::Default,
|
|
|
|
Visibility::Hidden => llvm::Visibility::Hidden,
|
|
|
|
Visibility::Protected => llvm::Visibility::Protected,
|
2017-09-18 12:14:52 +02:00
|
|
|
}
|
2017-09-07 16:11:58 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// FIXME(mw): Anything that is produced via DepGraph::with_task() must implement
//            the HashStable trait. Normally DepGraph::with_task() calls are
//            hidden behind queries, but CGU creation is a special case in two
//            ways: (1) it's not a query and (2) CGUs are output nodes, so their
//            Fingerprints are not actually needed. It remains to be clarified
//            how exactly this case will be handled in the red/green system, but
//            for now we content ourselves with providing a no-op HashStable
//            implementation for CGUs.
|
|
|
|
mod temp_stable_hash_impls {
|
|
|
|
use rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher,
|
|
|
|
HashStable};
|
|
|
|
use ModuleTranslation;
|
|
|
|
|
|
|
|
impl<HCX> HashStable<HCX> for ModuleTranslation {
|
|
|
|
fn hash_stable<W: StableHasherResult>(&self,
|
|
|
|
_: &mut HCX,
|
|
|
|
_: &mut StableHasher<W>) {
|
|
|
|
// do nothing
|
|
|
|
}
|
2017-09-12 11:04:46 -07:00
|
|
|
}
|
|
|
|
}
|