1
Fork 0

store the normalized types of field accesses

Fixes #31504
This commit is contained in:
Ariel Ben-Yehuda 2016-02-11 18:31:42 +02:00 committed by Ariel Ben-Yehuda
parent 350b50df00
commit 3c6f41026b
15 changed files with 150 additions and 56 deletions

View file

@ -131,6 +131,12 @@ pub struct Tables<'tcx> {
/// equivalents. This table is not used in trans (since regions /// equivalents. This table is not used in trans (since regions
/// are erased there) and hence is not serialized to metadata. /// are erased there) and hence is not serialized to metadata.
pub liberated_fn_sigs: NodeMap<ty::FnSig<'tcx>>, pub liberated_fn_sigs: NodeMap<ty::FnSig<'tcx>>,
/// For each FRU expression, record the normalized types of the fields
/// of the struct - this is needed because it is non-trivial to
/// normalize while preserving regions. This table is used only in
/// MIR construction and hence is not serialized to metadata.
pub fru_field_types: NodeMap<Vec<Ty<'tcx>>>
} }
impl<'tcx> Tables<'tcx> { impl<'tcx> Tables<'tcx> {
@ -144,6 +150,7 @@ impl<'tcx> Tables<'tcx> {
closure_tys: DefIdMap(), closure_tys: DefIdMap(),
closure_kinds: DefIdMap(), closure_kinds: DefIdMap(),
liberated_fn_sigs: NodeMap(), liberated_fn_sigs: NodeMap(),
fru_field_types: NodeMap()
} }
} }

View file

@ -502,7 +502,7 @@ pub struct Projection<'tcx, B, V> {
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)] #[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub enum ProjectionElem<'tcx, V> { pub enum ProjectionElem<'tcx, V> {
Deref, Deref,
Field(Field), Field(Field, Ty<'tcx>),
Index(V), Index(V),
/// These indices are generated by slice patterns. Easiest to explain /// These indices are generated by slice patterns. Easiest to explain
@ -553,8 +553,8 @@ impl Field {
} }
impl<'tcx> Lvalue<'tcx> { impl<'tcx> Lvalue<'tcx> {
pub fn field(self, f: Field) -> Lvalue<'tcx> { pub fn field(self, f: Field, ty: Ty<'tcx>) -> Lvalue<'tcx> {
self.elem(ProjectionElem::Field(f)) self.elem(ProjectionElem::Field(f, ty))
} }
pub fn deref(self) -> Lvalue<'tcx> { pub fn deref(self) -> Lvalue<'tcx> {
@ -594,8 +594,8 @@ impl<'tcx> Debug for Lvalue<'tcx> {
write!(fmt, "({:?} as {})", data.base, adt_def.variants[index].name), write!(fmt, "({:?} as {})", data.base, adt_def.variants[index].name),
ProjectionElem::Deref => ProjectionElem::Deref =>
write!(fmt, "(*{:?})", data.base), write!(fmt, "(*{:?})", data.base),
ProjectionElem::Field(field) => ProjectionElem::Field(field, ty) =>
write!(fmt, "{:?}.{:?}", data.base, field.index()), write!(fmt, "({:?}.{:?}: {:?})", data.base, field.index(), ty),
ProjectionElem::Index(ref index) => ProjectionElem::Index(ref index) =>
write!(fmt, "{:?}[{:?}]", data.base, index), write!(fmt, "{:?}[{:?}]", data.base, index),
ProjectionElem::ConstantIndex { offset, min_length, from_end: false } => ProjectionElem::ConstantIndex { offset, min_length, from_end: false } =>

View file

@ -73,23 +73,7 @@ impl<'tcx> LvalueTy<'tcx> {
tcx.sess.bug(&format!("cannot downcast non-enum type: `{:?}`", self)) tcx.sess.bug(&format!("cannot downcast non-enum type: `{:?}`", self))
} }
}, },
ProjectionElem::Field(field) => { ProjectionElem::Field(_, fty) => LvalueTy::Ty { ty: fty }
let field_ty = match self {
LvalueTy::Ty { ty } => match ty.sty {
ty::TyStruct(adt_def, substs) =>
adt_def.struct_variant().fields[field.index()].ty(tcx, substs),
ty::TyTuple(ref tys) =>
tys[field.index()],
ty::TyClosure(_, ref closure_substs) =>
closure_substs.upvar_tys[field.index()],
_ =>
tcx.sess.bug(&format!("cannot get field of type: `{:?}`", ty)),
},
LvalueTy::Downcast { adt_def, substs, variant_index } =>
adt_def.variants[variant_index].fields[field.index()].ty(tcx, substs),
};
LvalueTy::Ty { ty: field_ty }
}
} }
} }
} }

View file

@ -41,7 +41,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
} }
ExprKind::Field { lhs, name } => { ExprKind::Field { lhs, name } => {
let lvalue = unpack!(block = this.as_lvalue(block, lhs)); let lvalue = unpack!(block = this.as_lvalue(block, lhs));
let lvalue = lvalue.field(name); let lvalue = lvalue.field(name, expr.ty);
block.and(lvalue) block.and(lvalue)
} }
ExprKind::Deref { arg } => { ExprKind::Deref { arg } => {

View file

@ -139,7 +139,9 @@ impl<'a,'tcx> Builder<'a,'tcx> {
.collect(); .collect();
block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars)) block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))
} }
ExprKind::Adt { adt_def, variant_index, substs, fields, base } => { // see (*) above ExprKind::Adt {
adt_def, variant_index, substs, fields, base
} => { // see (*) above
// first process the set of fields that were provided // first process the set of fields that were provided
// (evaluating them in order given by user) // (evaluating them in order given by user)
let fields_map: FnvHashMap<_, _> = let fields_map: FnvHashMap<_, _> =
@ -147,25 +149,20 @@ impl<'a,'tcx> Builder<'a,'tcx> {
.map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr)))) .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))
.collect(); .collect();
// if base expression is given, evaluate it now
let base = base.map(|base| unpack!(block = this.as_lvalue(block, base)));
// get list of all fields that we will need
let field_names = this.hir.all_fields(adt_def, variant_index); let field_names = this.hir.all_fields(adt_def, variant_index);
// for the actual values we use, take either the let fields = if let Some(FruInfo { base, field_types }) = base {
// expr the user specified or, if they didn't let base = unpack!(block = this.as_lvalue(block, base));
// specify something for this field name, create a
// path relative to the base (which must have been
// supplied, or the IR is internally
// inconsistent).
let fields: Vec<_> =
field_names.into_iter() field_names.into_iter()
.map(|n| match fields_map.get(&n) { .zip(field_types.into_iter())
Some(v) => v.clone(), .map(|(n, ty)| match fields_map.get(&n) {
None => Operand::Consume(base.clone().unwrap().field(n)), Some(v) => v.clone(),
}) None => Operand::Consume(base.clone().field(n, ty))
.collect(); })
.collect()
} else {
field_names.iter().map(|n| fields_map[n].clone()).collect()
};
block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs), block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs),
fields)) fields))

View file

@ -404,7 +404,8 @@ impl<'a,'tcx> Builder<'a,'tcx> {
subpatterns.iter() subpatterns.iter()
.map(|subpattern| { .map(|subpattern| {
// e.g., `(x as Variant).0` // e.g., `(x as Variant).0`
let lvalue = downcast_lvalue.clone().field(subpattern.field); let lvalue = downcast_lvalue.clone().field(subpattern.field,
subpattern.field_ty());
// e.g., `(x as Variant).0 @ P1` // e.g., `(x as Variant).0 @ P1`
MatchPair::new(lvalue, &subpattern.pattern) MatchPair::new(lvalue, &subpattern.pattern)
}); });

View file

@ -21,7 +21,8 @@ impl<'a,'tcx> Builder<'a,'tcx> {
-> Vec<MatchPair<'pat, 'tcx>> { -> Vec<MatchPair<'pat, 'tcx>> {
subpatterns.iter() subpatterns.iter()
.map(|fieldpat| { .map(|fieldpat| {
let lvalue = lvalue.clone().field(fieldpat.field); let lvalue = lvalue.clone().field(fieldpat.field,
fieldpat.field_ty());
MatchPair::new(lvalue, &fieldpat.pattern) MatchPair::new(lvalue, &fieldpat.pattern)
}) })
.collect() .collect()

View file

@ -248,13 +248,23 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
variant_index: 0, variant_index: 0,
substs: substs, substs: substs,
fields: field_refs, fields: field_refs,
base: base.to_ref(), base: base.as_ref().map(|base| {
FruInfo {
base: base.to_ref(),
field_types: cx.tcx.tables
.borrow()
.fru_field_types[&self.id]
.clone()
}
})
} }
} }
ty::TyEnum(adt, substs) => { ty::TyEnum(adt, substs) => {
match cx.tcx.def_map.borrow()[&self.id].full_def() { match cx.tcx.def_map.borrow()[&self.id].full_def() {
Def::Variant(enum_id, variant_id) => { Def::Variant(enum_id, variant_id) => {
debug_assert!(adt.did == enum_id); debug_assert!(adt.did == enum_id);
assert!(base.is_none());
let index = adt.variant_index_with_id(variant_id); let index = adt.variant_index_with_id(variant_id);
let field_refs = field_refs(&adt.variants[index], fields); let field_refs = field_refs(&adt.variants[index], fields);
ExprKind::Adt { ExprKind::Adt {
@ -262,7 +272,7 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
variant_index: index, variant_index: index,
substs: substs, substs: substs,
fields: field_refs, fields: field_refs,
base: base.to_ref(), base: None
} }
} }
ref def => { ref def => {
@ -810,11 +820,16 @@ fn convert_var<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>,
}; };
match upvar_capture { match upvar_capture {
ty::UpvarCapture::ByValue => field_kind, ty::UpvarCapture::ByValue => field_kind,
ty::UpvarCapture::ByRef(_) => { ty::UpvarCapture::ByRef(borrow) => {
ExprKind::Deref { ExprKind::Deref {
arg: Expr { arg: Expr {
temp_lifetime: temp_lifetime, temp_lifetime: temp_lifetime,
ty: var_ty, ty: cx.tcx.mk_ref(
cx.tcx.mk_region(borrow.region),
ty::TypeAndMut {
ty: var_ty,
mutbl: borrow.kind.to_mutbl_lossy()
}),
span: expr.span, span: expr.span,
kind: field_kind, kind: field_kind,
}.to_ref() }.to_ref()

View file

@ -314,3 +314,20 @@ impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> {
} }
} }
} }
impl<'tcx> FieldPattern<'tcx> {
    /// Returns the type of the struct/enum field this sub-pattern matches.
    ///
    /// For a by-ref binding (`ref x` / `ref mut x`) the pattern's own type
    /// is a reference to the field, so the referent type is peeled off;
    /// any other pattern kind carries the field type directly.
    pub fn field_ty(&self) -> Ty<'tcx> {
        debug!("field_ty({:?},ty={:?})", self, self.pattern.ty);
        let binds_by_ref = match *self.pattern.kind {
            PatternKind::Binding { mode: BindingMode::ByRef(..), .. } => true,
            _ => false,
        };
        let r = if binds_by_ref {
            // A by-ref binding must have a reference type; anything else
            // would be a type-checking bug upstream.
            match self.pattern.ty.sty {
                ty::TyRef(_, mt) => mt.ty,
                _ => unreachable!(),
            }
        } else {
            self.pattern.ty
        };
        debug!("field_ty -> {:?}", r);
        r
    }
}

View file

@ -229,7 +229,7 @@ pub enum ExprKind<'tcx> {
variant_index: usize, variant_index: usize,
substs: &'tcx Substs<'tcx>, substs: &'tcx Substs<'tcx>,
fields: Vec<FieldExprRef<'tcx>>, fields: Vec<FieldExprRef<'tcx>>,
base: Option<ExprRef<'tcx>>, base: Option<FruInfo<'tcx>>
}, },
Closure { Closure {
closure_id: DefId, closure_id: DefId,
@ -256,6 +256,12 @@ pub struct FieldExprRef<'tcx> {
pub expr: ExprRef<'tcx>, pub expr: ExprRef<'tcx>,
} }
#[derive(Clone, Debug)]
pub struct FruInfo<'tcx> {
pub base: ExprRef<'tcx>,
pub field_types: Vec<Ty<'tcx>>
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Arm<'tcx> { pub struct Arm<'tcx> {
pub patterns: Vec<Pattern<'tcx>>, pub patterns: Vec<Pattern<'tcx>>,

View file

@ -47,6 +47,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'tcx> {
fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: visit::LvalueContext) { fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: visit::LvalueContext) {
self.super_lvalue(lvalue, context); self.super_lvalue(lvalue, context);
debug!("visiting lvalue {:?}", lvalue);
let lv_ty = self.mir.lvalue_ty(self.tcx(), lvalue).to_ty(self.tcx()); let lv_ty = self.mir.lvalue_ty(self.tcx(), lvalue).to_ty(self.tcx());
self.sanitize_type(lvalue, lv_ty); self.sanitize_type(lvalue, lv_ty);
} }

View file

@ -126,7 +126,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
} }
}) })
} }
mir::ProjectionElem::Field(ref field) => { mir::ProjectionElem::Field(ref field, _) => {
let base_ty = tr_base.ty.to_ty(tcx); let base_ty = tr_base.ty.to_ty(tcx);
let base_repr = adt::represent_type(ccx, base_ty); let base_repr = adt::represent_type(ccx, base_ty);
let discr = match tr_base.ty { let discr = match tr_base.ty {

View file

@ -3179,8 +3179,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
check_struct_fields_on_error(fcx, expr.id, fields, base_expr); check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
return; return;
} }
let (adt, variant) = match fcx.def_struct_variant(def, path.span) { let variant = match fcx.def_struct_variant(def, path.span) {
Some((adt, variant)) => (adt, variant), Some((_, variant)) => variant,
None => { None => {
span_err!(fcx.tcx().sess, path.span, E0071, span_err!(fcx.tcx().sess, path.span, E0071,
"`{}` does not name a structure", "`{}` does not name a structure",
@ -3195,12 +3195,23 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields, check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields,
base_expr.is_none()); base_expr.is_none());
if let &Some(ref base_expr) = base_expr { if let &Some(ref base_expr) = base_expr {
check_expr_has_type(fcx, base_expr, expr_ty); check_expr_has_type(fcx, base_expr, expr_ty);
if adt.adt_kind() == ty::AdtKind::Enum { match expr_ty.sty {
span_err!(tcx.sess, base_expr.span, E0436, ty::TyStruct(adt, substs) => {
"functional record update syntax requires a struct"); fcx.inh.tables.borrow_mut().fru_field_types.insert(
expr.id,
adt.struct_variant().fields.iter().map(|f| {
fcx.normalize_associated_types_in(
expr.span, &f.ty(tcx, substs)
)
}).collect()
);
}
_ => {
span_err!(tcx.sess, base_expr.span, E0436,
"functional record update syntax requires a struct");
}
} }
} }
} }

View file

@ -43,6 +43,7 @@ pub fn resolve_type_vars_in_expr(fcx: &FnCtxt, e: &hir::Expr) {
wbcx.visit_upvar_borrow_map(); wbcx.visit_upvar_borrow_map();
wbcx.visit_closures(); wbcx.visit_closures();
wbcx.visit_liberated_fn_sigs(); wbcx.visit_liberated_fn_sigs();
wbcx.visit_fru_field_types();
} }
pub fn resolve_type_vars_in_fn(fcx: &FnCtxt, pub fn resolve_type_vars_in_fn(fcx: &FnCtxt,
@ -64,6 +65,7 @@ pub fn resolve_type_vars_in_fn(fcx: &FnCtxt,
wbcx.visit_upvar_borrow_map(); wbcx.visit_upvar_borrow_map();
wbcx.visit_closures(); wbcx.visit_closures();
wbcx.visit_liberated_fn_sigs(); wbcx.visit_liberated_fn_sigs();
wbcx.visit_fru_field_types();
} }
/////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////
@ -371,6 +373,13 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
} }
} }
fn visit_fru_field_types(&self) {
for (&node_id, ftys) in self.fcx.inh.tables.borrow().fru_field_types.iter() {
let ftys = self.resolve(ftys, ResolvingFieldTypes(node_id));
self.tcx().tables.borrow_mut().fru_field_types.insert(node_id, ftys);
}
}
fn resolve<T:TypeFoldable<'tcx>>(&self, t: &T, reason: ResolveReason) -> T { fn resolve<T:TypeFoldable<'tcx>>(&self, t: &T, reason: ResolveReason) -> T {
t.fold_with(&mut Resolver::new(self.fcx, reason)) t.fold_with(&mut Resolver::new(self.fcx, reason))
} }
@ -387,6 +396,7 @@ enum ResolveReason {
ResolvingUpvar(ty::UpvarId), ResolvingUpvar(ty::UpvarId),
ResolvingClosure(DefId), ResolvingClosure(DefId),
ResolvingFnSig(ast::NodeId), ResolvingFnSig(ast::NodeId),
ResolvingFieldTypes(ast::NodeId)
} }
impl ResolveReason { impl ResolveReason {
@ -401,6 +411,9 @@ impl ResolveReason {
ResolvingFnSig(id) => { ResolvingFnSig(id) => {
tcx.map.span(id) tcx.map.span(id)
} }
ResolvingFieldTypes(id) => {
tcx.map.span(id)
}
ResolvingClosure(did) => { ResolvingClosure(did) => {
if let Some(node_id) = tcx.map.as_local_node_id(did) { if let Some(node_id) = tcx.map.as_local_node_id(did) {
tcx.expr_span(node_id) tcx.expr_span(node_id)
@ -478,14 +491,14 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
"cannot determine a type for this closure") "cannot determine a type for this closure")
} }
ResolvingFnSig(id) => { ResolvingFnSig(id) | ResolvingFieldTypes(id) => {
// any failures here should also fail when // any failures here should also fail when
// resolving the patterns, closure types, or // resolving the patterns, closure types, or
// something else. // something else.
let span = self.reason.span(self.tcx); let span = self.reason.span(self.tcx);
self.tcx.sess.delay_span_bug( self.tcx.sess.delay_span_bug(
span, span,
&format!("cannot resolve some aspect of fn sig for {:?}", id)); &format!("cannot resolve some aspect of data for {:?}", id));
} }
} }
} }

View file

@ -0,0 +1,41 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_attrs)]
use std::marker::PhantomData;
/// Maps a marker type to an associated data type.
pub trait DataBind {
    type Data;
}

// `Global<T>` projects straight back to `T`, so a use of
// `<Global<X> as DataBind>::Data` only resolves to `X` after
// associated-type normalization.
impl<T> DataBind for Global<T> {
    type Data = T;
}

// Zero-sized marker carrying `T` only at the type level.
pub struct Global<T>(PhantomData<T>);

// The field's declared type is an associated-type projection rather than
// a concrete type; accessing `offsets` therefore requires the compiler to
// normalize the projection to `[u32; 2]` — the situation this test is
// presumably exercising (confirm against the commit's fru_field_types
// change).
pub struct Data {
    pub offsets: <Global<[u32; 2]> as DataBind>::Data,
}
/// Builds a `Data` and mutates one element of its projected-type field.
///
/// Compiled through MIR (`#[rustc_mir]`); the indexed store into
/// `offsets` goes through a field whose declared type is an associated
/// type projection, so the field access needs a normalized type.
#[rustc_mir]
fn create_data() -> Data {
    let mut result = Data { offsets: [1, 2] };
    result.offsets[0] = 3;
    result
}
fn main() {
    let d = create_data();
    // The element written through the projected-type field must hold the
    // new value, and the untouched element must keep its initializer.
    assert_eq!(d.offsets[0], 3);
    assert_eq!(d.offsets[1], 2);
}