store the normalized types of field accesses

Fixes #31504
Ariel Ben-Yehuda 2016-02-11 18:31:42 +02:00 committed by Ariel Ben-Yehuda
parent 350b50df00
commit 3c6f41026b
15 changed files with 150 additions and 56 deletions

View File

@@ -131,6 +131,12 @@ pub struct Tables<'tcx> {
/// equivalents. This table is not used in trans (since regions
/// are erased there) and hence is not serialized to metadata.
pub liberated_fn_sigs: NodeMap<ty::FnSig<'tcx>>,
/// For each FRU expression, record the normalized types of the fields
/// of the struct - this is needed because it is non-trivial to
/// normalize while preserving regions. This table is used only in
/// MIR construction and hence is not serialized to metadata.
pub fru_field_types: NodeMap<Vec<Ty<'tcx>>>
}
impl<'tcx> Tables<'tcx> {
@@ -144,6 +150,7 @@ impl<'tcx> Tables<'tcx> {
closure_tys: DefIdMap(),
closure_kinds: DefIdMap(),
liberated_fn_sigs: NodeMap(),
fru_field_types: NodeMap()
}
}
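For context, "FRU" is functional record update, i.e. `Struct { field: value, ..base }` syntax. A minimal sketch of the kind of expression this table exists for, modeled on the regression test added at the end of this commit (the `flag` field and the `update`/`main` functions are made up for illustration):

use std::marker::PhantomData;

pub trait DataBind { type Data; }
pub struct Global<T>(PhantomData<T>);
impl<T> DataBind for Global<T> { type Data = T; }

pub struct Data {
    // The declared field type is an associated-type projection; it has to be
    // normalized to `[u32; 2]` before MIR construction can type the field
    // projections that the FRU expression below expands into.
    pub offsets: <Global<[u32; 2]> as DataBind>::Data,
    pub flag: bool,
}

fn update(base: Data) -> Data {
    // An FRU expression: `offsets` is given explicitly, `flag` is taken from
    // `base`. `fru_field_types` records the normalized type of every field of
    // `Data` for this expression.
    Data { offsets: [3, 4], ..base }
}

fn main() {
    let d = update(Data { offsets: [1, 2], flag: true });
    assert_eq!(d.offsets, [3, 4]);
    assert!(d.flag);
}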

View File

@@ -502,7 +502,7 @@ pub struct Projection<'tcx, B, V> {
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub enum ProjectionElem<'tcx, V> {
Deref,
Field(Field),
Field(Field, Ty<'tcx>),
Index(V),
/// These indices are generated by slice patterns. Easiest to explain
@@ -553,8 +553,8 @@ impl Field {
}
impl<'tcx> Lvalue<'tcx> {
pub fn field(self, f: Field) -> Lvalue<'tcx> {
self.elem(ProjectionElem::Field(f))
pub fn field(self, f: Field, ty: Ty<'tcx>) -> Lvalue<'tcx> {
self.elem(ProjectionElem::Field(f, ty))
}
pub fn deref(self) -> Lvalue<'tcx> {
@@ -594,8 +594,8 @@ impl<'tcx> Debug for Lvalue<'tcx> {
write!(fmt, "({:?} as {})", data.base, adt_def.variants[index].name),
ProjectionElem::Deref =>
write!(fmt, "(*{:?})", data.base),
ProjectionElem::Field(field) =>
write!(fmt, "{:?}.{:?}", data.base, field.index()),
ProjectionElem::Field(field, ty) =>
write!(fmt, "({:?}.{:?}: {:?})", data.base, field.index(), ty),
ProjectionElem::Index(ref index) =>
write!(fmt, "{:?}[{:?}]", data.base, index),
ProjectionElem::ConstantIndex { offset, min_length, from_end: false } =>

View File

@@ -73,23 +73,7 @@ impl<'tcx> LvalueTy<'tcx> {
tcx.sess.bug(&format!("cannot downcast non-enum type: `{:?}`", self))
}
},
ProjectionElem::Field(field) => {
let field_ty = match self {
LvalueTy::Ty { ty } => match ty.sty {
ty::TyStruct(adt_def, substs) =>
adt_def.struct_variant().fields[field.index()].ty(tcx, substs),
ty::TyTuple(ref tys) =>
tys[field.index()],
ty::TyClosure(_, ref closure_substs) =>
closure_substs.upvar_tys[field.index()],
_ =>
tcx.sess.bug(&format!("cannot get field of type: `{:?}`", ty)),
},
LvalueTy::Downcast { adt_def, substs, variant_index } =>
adt_def.variants[variant_index].fields[field.index()].ty(tcx, substs),
};
LvalueTy::Ty { ty: field_ty }
}
ProjectionElem::Field(_, fty) => LvalueTy::Ty { ty: fty }
}
}
}

View File

@@ -41,7 +41,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
}
ExprKind::Field { lhs, name } => {
let lvalue = unpack!(block = this.as_lvalue(block, lhs));
let lvalue = lvalue.field(name);
let lvalue = lvalue.field(name, expr.ty);
block.and(lvalue)
}
ExprKind::Deref { arg } => {

View File

@@ -139,7 +139,9 @@ impl<'a,'tcx> Builder<'a,'tcx> {
.collect();
block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))
}
ExprKind::Adt { adt_def, variant_index, substs, fields, base } => { // see (*) above
ExprKind::Adt {
adt_def, variant_index, substs, fields, base
} => { // see (*) above
// first process the set of fields that were provided
// (evaluating them in order given by user)
let fields_map: FnvHashMap<_, _> =
@@ -147,25 +149,20 @@ impl<'a,'tcx> Builder<'a,'tcx> {
.map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))
.collect();
// if base expression is given, evaluate it now
let base = base.map(|base| unpack!(block = this.as_lvalue(block, base)));
// get list of all fields that we will need
let field_names = this.hir.all_fields(adt_def, variant_index);
// for the actual values we use, take either the
// expr the user specified or, if they didn't
// specify something for this field name, create a
// path relative to the base (which must have been
// supplied, or the IR is internally
// inconsistent).
let fields: Vec<_> =
let fields = if let Some(FruInfo { base, field_types }) = base {
let base = unpack!(block = this.as_lvalue(block, base));
field_names.into_iter()
.map(|n| match fields_map.get(&n) {
Some(v) => v.clone(),
None => Operand::Consume(base.clone().unwrap().field(n)),
})
.collect();
.zip(field_types.into_iter())
.map(|(n, ty)| match fields_map.get(&n) {
Some(v) => v.clone(),
None => Operand::Consume(base.clone().field(n, ty))
})
.collect()
} else {
field_names.iter().map(|n| fields_map[n].clone()).collect()
};
block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs),
fields))
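A user-level sketch (with made-up names) of what this branch lowers: fields named in the literal come from `fields_map`, and every remaining field becomes an `Operand::Consume` of a field projection into the evaluated base, paired with its recorded normalized type.

struct Point3 { x: u32, y: u32, z: u32 }

fn bump_x(base: Point3) -> Point3 {
    // `x` is taken from the explicit field list; `y` and `z` are emitted as
    // consumes of `base.y` / `base.z`, using the normalized field types that
    // typeck stored in `fru_field_types`.
    Point3 { x: base.x + 1, ..base }
}

fn main() {
    let p = bump_x(Point3 { x: 1, y: 2, z: 3 });
    assert_eq!((p.x, p.y, p.z), (2, 2, 3));
}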

View File

@@ -404,7 +404,8 @@ impl<'a,'tcx> Builder<'a,'tcx> {
subpatterns.iter()
.map(|subpattern| {
// e.g., `(x as Variant).0`
let lvalue = downcast_lvalue.clone().field(subpattern.field);
let lvalue = downcast_lvalue.clone().field(subpattern.field,
subpattern.field_ty());
// e.g., `(x as Variant).0 @ P1`
MatchPair::new(lvalue, &subpattern.pattern)
});

View File

@@ -21,7 +21,8 @@ impl<'a,'tcx> Builder<'a,'tcx> {
-> Vec<MatchPair<'pat, 'tcx>> {
subpatterns.iter()
.map(|fieldpat| {
let lvalue = lvalue.clone().field(fieldpat.field);
let lvalue = lvalue.clone().field(fieldpat.field,
fieldpat.field_ty());
MatchPair::new(lvalue, &fieldpat.pattern)
})
.collect()

View File

@@ -248,13 +248,23 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
variant_index: 0,
substs: substs,
fields: field_refs,
base: base.to_ref(),
base: base.as_ref().map(|base| {
FruInfo {
base: base.to_ref(),
field_types: cx.tcx.tables
.borrow()
.fru_field_types[&self.id]
.clone()
}
})
}
}
ty::TyEnum(adt, substs) => {
match cx.tcx.def_map.borrow()[&self.id].full_def() {
Def::Variant(enum_id, variant_id) => {
debug_assert!(adt.did == enum_id);
assert!(base.is_none());
let index = adt.variant_index_with_id(variant_id);
let field_refs = field_refs(&adt.variants[index], fields);
ExprKind::Adt {
@@ -262,7 +272,7 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
variant_index: index,
substs: substs,
fields: field_refs,
base: base.to_ref(),
base: None
}
}
ref def => {
@@ -810,11 +820,16 @@ fn convert_var<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>,
};
match upvar_capture {
ty::UpvarCapture::ByValue => field_kind,
ty::UpvarCapture::ByRef(_) => {
ty::UpvarCapture::ByRef(borrow) => {
ExprKind::Deref {
arg: Expr {
temp_lifetime: temp_lifetime,
ty: var_ty,
ty: cx.tcx.mk_ref(
cx.tcx.mk_region(borrow.region),
ty::TypeAndMut {
ty: var_ty,
mutbl: borrow.kind.to_mutbl_lossy()
}),
span: expr.span,
kind: field_kind,
}.to_ref()
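The change above matters for by-reference captures: the closure's upvar field holds a `&T` or `&mut T`, so the HAIR expression for that field is now given the reference type (built with `mk_ref`) and then wrapped in a `Deref`. A user-level illustration (names are made up):

fn main() {
    let mut buf = [1u32, 2];
    {
        // `buf` is captured by mutable reference, so inside the closure the
        // upvar field has type `&mut [u32; 2]`, and each use of `buf` is
        // lowered as a dereference of that field.
        let mut write = || buf[0] = 3;
        write();
    }
    assert_eq!(buf[0], 3);
}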

View File

@@ -314,3 +314,20 @@ impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> {
}
}
}
impl<'tcx> FieldPattern<'tcx> {
pub fn field_ty(&self) -> Ty<'tcx> {
debug!("field_ty({:?},ty={:?})", self, self.pattern.ty);
let r = match *self.pattern.kind {
PatternKind::Binding { mode: BindingMode::ByRef(..), ..} => {
match self.pattern.ty.sty {
ty::TyRef(_, mt) => mt.ty,
_ => unreachable!()
}
}
_ => self.pattern.ty
};
debug!("field_ty -> {:?}", r);
r
}
}
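For `ref`/`ref mut` bindings the pattern's own type is a reference to the field, so `field_ty` peels one level of reference to recover the type of the field projection itself. A small illustration (hypothetical names):

struct Pair { a: u32, b: u32 }

fn main() {
    let p = Pair { a: 1, b: 2 };
    // The binding `ref a` has type `&u32`, but the field being projected out
    // of `p` has type `u32`; `field_ty` returns `u32`, so the generated
    // `ProjectionElem::Field` carries the field's type, not the binding's.
    let Pair { ref a, b } = p;
    assert_eq!((*a, b), (1, 2));
}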

View File

@@ -229,7 +229,7 @@ pub enum ExprKind<'tcx> {
variant_index: usize,
substs: &'tcx Substs<'tcx>,
fields: Vec<FieldExprRef<'tcx>>,
base: Option<ExprRef<'tcx>>,
base: Option<FruInfo<'tcx>>
},
Closure {
closure_id: DefId,
@@ -256,6 +256,12 @@ pub struct FieldExprRef<'tcx> {
pub expr: ExprRef<'tcx>,
}
#[derive(Clone, Debug)]
pub struct FruInfo<'tcx> {
pub base: ExprRef<'tcx>,
pub field_types: Vec<Ty<'tcx>>
}
#[derive(Clone, Debug)]
pub struct Arm<'tcx> {
pub patterns: Vec<Pattern<'tcx>>,

View File

@@ -47,6 +47,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'tcx> {
fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: visit::LvalueContext) {
self.super_lvalue(lvalue, context);
debug!("visiting lvalue {:?}", lvalue);
let lv_ty = self.mir.lvalue_ty(self.tcx(), lvalue).to_ty(self.tcx());
self.sanitize_type(lvalue, lv_ty);
}

View File

@@ -126,7 +126,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}
})
}
mir::ProjectionElem::Field(ref field) => {
mir::ProjectionElem::Field(ref field, _) => {
let base_ty = tr_base.ty.to_ty(tcx);
let base_repr = adt::represent_type(ccx, base_ty);
let discr = match tr_base.ty {

View File

@@ -3179,8 +3179,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
return;
}
let (adt, variant) = match fcx.def_struct_variant(def, path.span) {
Some((adt, variant)) => (adt, variant),
let variant = match fcx.def_struct_variant(def, path.span) {
Some((_, variant)) => variant,
None => {
span_err!(fcx.tcx().sess, path.span, E0071,
"`{}` does not name a structure",
@@ -3195,12 +3195,23 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields,
base_expr.is_none());
if let &Some(ref base_expr) = base_expr {
check_expr_has_type(fcx, base_expr, expr_ty);
if adt.adt_kind() == ty::AdtKind::Enum {
span_err!(tcx.sess, base_expr.span, E0436,
"functional record update syntax requires a struct");
match expr_ty.sty {
ty::TyStruct(adt, substs) => {
fcx.inh.tables.borrow_mut().fru_field_types.insert(
expr.id,
adt.struct_variant().fields.iter().map(|f| {
fcx.normalize_associated_types_in(
expr.span, &f.ty(tcx, substs)
)
}).collect()
);
}
_ => {
span_err!(tcx.sess, base_expr.span, E0436,
"functional record update syntax requires a struct");
}
}
}
}
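A sketch of the two arms above (names invented for illustration): on a struct literal with a base expression the normalized field types are recorded; on an enum variant the same syntax is rejected with E0436.

struct S { x: u32, y: u32 }

fn ok(base: S) -> S {
    // `S` is a struct, so the `TyStruct` arm records the normalized type of
    // every field of `S` for this expression in `fru_field_types`.
    S { x: 9, ..base }
}

// By contrast, functional record update on an enum variant, e.g.
//     enum E { V { x: u32 } }
//     fn bad(base: E) -> E { E::V { ..base } }
// falls into the `_` arm and is reported as error E0436:
// "functional record update syntax requires a struct".

fn main() {
    let s = ok(S { x: 1, y: 2 });
    assert_eq!((s.x, s.y), (9, 2));
}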

View File

@@ -43,6 +43,7 @@ pub fn resolve_type_vars_in_expr(fcx: &FnCtxt, e: &hir::Expr) {
wbcx.visit_upvar_borrow_map();
wbcx.visit_closures();
wbcx.visit_liberated_fn_sigs();
wbcx.visit_fru_field_types();
}
pub fn resolve_type_vars_in_fn(fcx: &FnCtxt,
@@ -64,6 +65,7 @@ pub fn resolve_type_vars_in_fn(fcx: &FnCtxt,
wbcx.visit_upvar_borrow_map();
wbcx.visit_closures();
wbcx.visit_liberated_fn_sigs();
wbcx.visit_fru_field_types();
}
///////////////////////////////////////////////////////////////////////////
@@ -371,6 +373,13 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
}
}
fn visit_fru_field_types(&self) {
for (&node_id, ftys) in self.fcx.inh.tables.borrow().fru_field_types.iter() {
let ftys = self.resolve(ftys, ResolvingFieldTypes(node_id));
self.tcx().tables.borrow_mut().fru_field_types.insert(node_id, ftys);
}
}
fn resolve<T:TypeFoldable<'tcx>>(&self, t: &T, reason: ResolveReason) -> T {
t.fold_with(&mut Resolver::new(self.fcx, reason))
}
@@ -387,6 +396,7 @@ enum ResolveReason {
ResolvingUpvar(ty::UpvarId),
ResolvingClosure(DefId),
ResolvingFnSig(ast::NodeId),
ResolvingFieldTypes(ast::NodeId)
}
impl ResolveReason {
@@ -401,6 +411,9 @@ impl ResolveReason {
ResolvingFnSig(id) => {
tcx.map.span(id)
}
ResolvingFieldTypes(id) => {
tcx.map.span(id)
}
ResolvingClosure(did) => {
if let Some(node_id) = tcx.map.as_local_node_id(did) {
tcx.expr_span(node_id)
@@ -478,14 +491,14 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
"cannot determine a type for this closure")
}
ResolvingFnSig(id) => {
ResolvingFnSig(id) | ResolvingFieldTypes(id) => {
// any failures here should also fail when
// resolving the patterns, closure types, or
// something else.
let span = self.reason.span(self.tcx);
self.tcx.sess.delay_span_bug(
span,
&format!("cannot resolve some aspect of fn sig for {:?}", id));
&format!("cannot resolve some aspect of data for {:?}", id));
}
}
}

View File

@@ -0,0 +1,41 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_attrs)]
use std::marker::PhantomData;
pub trait DataBind {
type Data;
}
impl<T> DataBind for Global<T> {
type Data = T;
}
pub struct Global<T>(PhantomData<T>);
pub struct Data {
pub offsets: <Global<[u32; 2]> as DataBind>::Data,
}
#[rustc_mir]
fn create_data() -> Data {
let mut d = Data { offsets: [1, 2] };
d.offsets[0] = 3;
d
}
fn main() {
let d = create_data();
assert_eq!(d.offsets[0], 3);
assert_eq!(d.offsets[1], 2);
}