tree-ssa-alias.c (ao_ref_init): New function.

2009-05-28  Richard Guenther  <rguenther@suse.de>

	* tree-ssa-alias.c (ao_ref_init): New function.
	(ao_ref_base): Likewise.
	(ao_ref_base_alias_set): Likewise.
	(ao_ref_alias_set): Likewise.
	(refs_may_alias_p_1): Change signature.
	(refs_may_alias_p): Adjust.
	(refs_anti_dependent_p): Likewise.
	(refs_output_dependent_p): Likewise.
	(call_may_clobber_ref_p_1): Change signature.
	(call_may_clobber_ref_p): Adjust.
	(stmt_may_clobber_ref_p_1): New function split out from ...
	(stmt_may_clobber_ref_p): ... here.
	(maybe_skip_until): Adjust signature.
	(get_continuation_for_phi): Likewise.
	(walk_non_aliased_vuses): Likewise.
	* tree-ssa-alias.h (struct ao_ref_s): New structure type.
	(ao_ref_init): Declare.
	(ao_ref_base): Likewise.
	(ao_ref_alias_set): Likewise.
	(stmt_may_clobber_ref_p_1): Likewise.
	(walk_non_aliased_vuses): Adjust.
	* tree-ssa-sccvn.c (ao_ref_init_from_vn_reference): New function.
	(get_ref_from_reference_ops): Remove.
	(vn_reference_lookup_2): Adjust signature.
	(vn_reference_lookup_3): Do not re-build trees.  Handle unions.
	(vn_reference_lookup_pieces): Adjust signature, do not re-build
	trees.
	(vn_reference_lookup): Adjust.
	(vn_reference_insert): Likewise.
	(vn_reference_insert_pieces): Adjust signature.
	(visit_reference_op_call): Adjust.
	* tree-ssa-pre.c (get_expr_type): Simplify.
	(phi_translate_1): Adjust.
	(compute_avail): Likewise.
	(translate_vuse_through_block): Do not re-build trees.
	(value_dies_in_block_x): Likewise.
	* tree-ssa-sccvn.h (struct vn_reference_s): Add type and alias-set
	fields.
	(vn_reference_lookup_pieces): Adjust declaration.
	(vn_reference_insert_pieces): Likewise.

	* gcc.dg/tree-ssa/ssa-fre-26.c: New testcase.
	* gcc.c-torture/execute/20090527-1.c: Likewise.

From-SVN: r147953
This commit is contained in:
Richard Guenther 2009-05-28 13:24:16 +00:00 committed by Richard Biener
parent 554223b6bd
commit b45d27197f
9 changed files with 487 additions and 157 deletions

View File

@ -1,3 +1,46 @@
2009-05-28 Richard Guenther <rguenther@suse.de>
* tree-ssa-alias.c (ao_ref_init): New function.
(ao_ref_base): Likewise.
(ao_ref_base_alias_set): Likewise.
(ao_ref_alias_set): Likewise.
(refs_may_alias_p_1): Change signature.
(refs_may_alias_p): Adjust.
(refs_anti_dependent_p): Likewise.
(refs_output_dependent_p): Likewise.
(call_may_clobber_ref_p_1): Change signature.
(call_may_clobber_ref_p): Adjust.
(stmt_may_clobber_ref_p_1): New function split out from ...
(stmt_may_clobber_ref_p): ... here.
(maybe_skip_until): Adjust signature.
(get_continuation_for_phi): Likewise.
(walk_non_aliased_vuses): Likewise.
* tree-ssa-alias.h (struct ao_ref_s): New structure type.
(ao_ref_init): Declare.
(ao_ref_base): Likewise.
(ao_ref_alias_set): Likewise.
(stmt_may_clobber_ref_p_1): Likewise.
(walk_non_aliased_vuses): Adjust.
* tree-ssa-sccvn.c (ao_ref_init_from_vn_reference): New function.
(get_ref_from_reference_ops): Remove.
(vn_reference_lookup_2): Adjust signature.
(vn_reference_lookup_3): Do not re-build trees. Handle unions.
(vn_reference_lookup_pieces): Adjust signature, do not re-build
trees.
(vn_reference_lookup): Adjust.
(vn_reference_insert): Likewise.
(vn_reference_insert_pieces): Adjust signature.
(visit_reference_op_call): Adjust.
* tree-ssa-pre.c (get_expr_type): Simplify.
(phi_translate_1): Adjust.
(compute_avail): Likewise.
(translate_vuse_through_block): Do not re-build trees.
(value_dies_in_block_x): Likewise.
* tree-ssa-sccvn.h (struct vn_reference_s): Add type and alias-set
fields.
(vn_reference_lookup_pieces): Adjust declaration.
(vn_reference_insert_pieces): Likewise.
2009-05-28 Benjamin Kosnik <bkoz@redhat.com>
* tree-ssa-copy.c (replace_exp_1): Move op for warning-free use

View File

@ -1,3 +1,8 @@
2009-05-28 Richard Guenther <rguenther@suse.de>
* gcc.dg/tree-ssa/ssa-fre-26.c: New testcase.
* gcc.c-torture/execute/20090527-1.c: Likewise.
2009-05-28 Dodji Seketeli <dodji@redhat.com>
PR c++/39754

View File

@ -0,0 +1,38 @@
typedef enum { POSITION_ASIS, POSITION_UNSPECIFIED } unit_position;
typedef enum { STATUS_UNKNOWN, STATUS_UNSPECIFIED } unit_status;
typedef struct
{
unit_position position;
unit_status status;
} unit_flags;
extern void abort (void);
void
new_unit (unit_flags * flags)
{
if (flags->status == STATUS_UNSPECIFIED)
flags->status = STATUS_UNKNOWN;
if (flags->position == POSITION_UNSPECIFIED)
flags->position = POSITION_ASIS;
switch (flags->status)
{
case STATUS_UNKNOWN:
break;
default:
abort ();
}
}
int main()
{
unit_flags f;
f.status = STATUS_UNSPECIFIED;
new_unit (&f);
return 0;
}

View File

@ -0,0 +1,18 @@
/* { dg-do compile } */
/* { dg-options "-O -fno-tree-sra -fdump-tree-fre-details" } */
union U {
float f;
int i;
};
int foo (union U *p)
{
union U u;
p->f = 0.0;
u = *p;
return u.i;
}
/* { dg-final { scan-tree-dump "Replaced u.i with 0 in" "fre" } } */
/* { dg-final { cleanup-tree-dump "fre" } } */

View File

@ -439,6 +439,55 @@ debug_points_to_info_for (tree var)
dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF. */
void
ao_ref_init (ao_ref *r, tree ref)
{
r->ref = ref;
r->base = NULL_TREE;
r->offset = 0;
r->size = -1;
r->max_size = -1;
r->ref_alias_set = -1;
r->base_alias_set = -1;
}
/* Returns the base object of the memory reference *REF. */
tree
ao_ref_base (ao_ref *ref)
{
if (ref->base)
return ref->base;
ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
&ref->max_size);
return ref->base;
}
/* Returns the base object alias set of the memory reference *REF. */
static alias_set_type ATTRIBUTE_UNUSED
ao_ref_base_alias_set (ao_ref *ref)
{
if (ref->base_alias_set != -1)
return ref->base_alias_set;
ref->base_alias_set = get_alias_set (ao_ref_base (ref));
return ref->base_alias_set;
}
/* Returns the reference alias set of the memory reference *REF. */
alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
if (ref->ref_alias_set != -1)
return ref->ref_alias_set;
ref->ref_alias_set = get_alias_set (ref->ref);
return ref->ref_alias_set;
}
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
decide. */
@ -675,7 +724,7 @@ indirect_refs_may_alias_p (tree ref1, tree ptr1,
/* Return true, if the two memory references REF1 and REF2 may alias. */
static bool
refs_may_alias_p_1 (tree ref1, tree ref2, bool tbaa_p)
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
tree base1, base2;
HOST_WIDE_INT offset1 = 0, offset2 = 0;
@ -684,18 +733,26 @@ refs_may_alias_p_1 (tree ref1, tree ref2, bool tbaa_p)
bool var1_p, var2_p, ind1_p, ind2_p;
alias_set_type set;
gcc_assert ((SSA_VAR_P (ref1)
|| handled_component_p (ref1)
|| INDIRECT_REF_P (ref1)
|| TREE_CODE (ref1) == TARGET_MEM_REF)
&& (SSA_VAR_P (ref2)
|| handled_component_p (ref2)
|| INDIRECT_REF_P (ref2)
|| TREE_CODE (ref2) == TARGET_MEM_REF));
gcc_assert ((!ref1->ref
|| SSA_VAR_P (ref1->ref)
|| handled_component_p (ref1->ref)
|| INDIRECT_REF_P (ref1->ref)
|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
&& (!ref2->ref
|| SSA_VAR_P (ref2->ref)
|| handled_component_p (ref2->ref)
|| INDIRECT_REF_P (ref2->ref)
|| TREE_CODE (ref2->ref) == TARGET_MEM_REF));
/* Decompose the references into their base objects and the access. */
base1 = get_ref_base_and_extent (ref1, &offset1, &size1, &max_size1);
base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &max_size2);
base1 = ao_ref_base (ref1);
offset1 = ref1->offset;
size1 = ref1->size;
max_size1 = ref1->max_size;
base2 = ao_ref_base (ref2);
offset2 = ref2->offset;
size2 = ref2->size;
max_size2 = ref2->max_size;
/* We can end up with registers or constants as bases for example from
*D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
@ -719,14 +776,15 @@ refs_may_alias_p_1 (tree ref1, tree ref2, bool tbaa_p)
/* First defer to TBAA if possible. */
if (tbaa_p
&& flag_strict_aliasing
&& !alias_sets_conflict_p (get_alias_set (ref1), get_alias_set (ref2)))
&& !alias_sets_conflict_p (ao_ref_alias_set (ref1),
ao_ref_alias_set (ref2)))
return false;
/* If one reference is a TARGET_MEM_REF weird things are allowed. Still
TBAA disambiguation based on the access type is possible, so bail
out only after that check. */
if (TREE_CODE (ref1) == TARGET_MEM_REF
|| TREE_CODE (ref2) == TARGET_MEM_REF)
if ((ref1->ref && TREE_CODE (ref1->ref) == TARGET_MEM_REF)
|| (ref2->ref && TREE_CODE (ref2->ref) == TARGET_MEM_REF))
return true;
/* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
@ -734,19 +792,19 @@ refs_may_alias_p_1 (tree ref1, tree ref2, bool tbaa_p)
ind2_p = INDIRECT_REF_P (base2);
set = tbaa_p ? -1 : 0;
if (var1_p && ind2_p)
return indirect_ref_may_alias_decl_p (ref2, TREE_OPERAND (base2, 0),
return indirect_ref_may_alias_decl_p (ref2->ref, TREE_OPERAND (base2, 0),
offset2, max_size2, set,
ref1, base1,
ref1->ref, base1,
offset1, max_size1, set);
else if (ind1_p && var2_p)
return indirect_ref_may_alias_decl_p (ref1, TREE_OPERAND (base1, 0),
return indirect_ref_may_alias_decl_p (ref1->ref, TREE_OPERAND (base1, 0),
offset1, max_size1, set,
ref2, base2,
ref2->ref, base2,
offset2, max_size2, set);
else if (ind1_p && ind2_p)
return indirect_refs_may_alias_p (ref1, TREE_OPERAND (base1, 0),
return indirect_refs_may_alias_p (ref1->ref, TREE_OPERAND (base1, 0),
offset1, max_size1, set,
ref2, TREE_OPERAND (base2, 0),
ref2->ref, TREE_OPERAND (base2, 0),
offset2, max_size2, set);
gcc_unreachable ();
@ -755,7 +813,11 @@ refs_may_alias_p_1 (tree ref1, tree ref2, bool tbaa_p)
bool
refs_may_alias_p (tree ref1, tree ref2)
{
bool res = refs_may_alias_p_1 (ref1, ref2, true);
ao_ref r1, r2;
bool res;
ao_ref_init (&r1, ref1);
ao_ref_init (&r2, ref2);
res = refs_may_alias_p_1 (&r1, &r2, true);
if (res)
++alias_stats.refs_may_alias_p_may_alias;
else
@ -769,7 +831,10 @@ refs_may_alias_p (tree ref1, tree ref2)
bool
refs_anti_dependent_p (tree load, tree store)
{
return refs_may_alias_p_1 (load, store, false);
ao_ref r1, r2;
ao_ref_init (&r1, load);
ao_ref_init (&r2, store);
return refs_may_alias_p_1 (&r1, &r2, false);
}
/* Returns true if there is a output dependence for the stores
@ -778,7 +843,10 @@ refs_anti_dependent_p (tree load, tree store)
bool
refs_output_dependent_p (tree store1, tree store2)
{
return refs_may_alias_p_1 (store1, store2, false);
ao_ref r1, r2;
ao_ref_init (&r1, store1);
ao_ref_init (&r2, store2);
return refs_may_alias_p_1 (&r1, &r2, false);
}
/* If the call CALL may use the memory reference REF return true,
@ -907,7 +975,7 @@ ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
return true, otherwise return false. */
static bool
call_may_clobber_ref_p_1 (gimple call, tree ref)
call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
{
tree base;
@ -916,7 +984,7 @@ call_may_clobber_ref_p_1 (gimple call, tree ref)
& (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
return false;
base = get_base_address (ref);
base = ao_ref_base (ref);
if (!base)
return true;
@ -957,10 +1025,13 @@ call_may_clobber_ref_p_1 (gimple call, tree ref)
return true;
}
static bool
static bool ATTRIBUTE_UNUSED
call_may_clobber_ref_p (gimple call, tree ref)
{
bool res = call_may_clobber_ref_p_1 (call, ref);
bool res;
ao_ref r;
ao_ref_init (&r, ref);
res = call_may_clobber_ref_p_1 (call, &r);
if (res)
++alias_stats.call_may_clobber_ref_p_may_alias;
else
@ -973,34 +1044,52 @@ call_may_clobber_ref_p (gimple call, tree ref)
otherwise return false. */
bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
if (is_gimple_call (stmt))
{
tree lhs = gimple_call_lhs (stmt);
if (lhs
&& !is_gimple_reg (lhs)
&& refs_may_alias_p (ref, lhs))
return true;
&& !is_gimple_reg (lhs))
{
ao_ref r;
ao_ref_init (&r, lhs);
if (refs_may_alias_p_1 (ref, &r, true))
return true;
}
return call_may_clobber_ref_p (stmt, ref);
return call_may_clobber_ref_p_1 (stmt, ref);
}
else if (is_gimple_assign (stmt))
return refs_may_alias_p (ref, gimple_assign_lhs (stmt));
{
ao_ref r;
ao_ref_init (&r, gimple_assign_lhs (stmt));
return refs_may_alias_p_1 (ref, &r, true);
}
else if (gimple_code (stmt) == GIMPLE_ASM)
return true;
return false;
}
static tree get_continuation_for_phi (gimple, tree, bitmap *);
bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
return stmt_may_clobber_ref_p_1 (stmt, &r);
}
static tree get_continuation_for_phi (gimple, ao_ref *, bitmap *);
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
TARGET or a statement clobbering the memory reference REF in which
case false is returned. The walk starts with VUSE, one argument of PHI. */
static bool
maybe_skip_until (gimple phi, tree target, tree ref, tree vuse, bitmap *visited)
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
tree vuse, bitmap *visited)
{
if (!*visited)
*visited = BITMAP_ALLOC (NULL);
@ -1024,7 +1113,7 @@ maybe_skip_until (gimple phi, tree target, tree ref, tree vuse, bitmap *visited)
}
/* A clobbering statement or the end of the IL ends it failing. */
else if (gimple_nop_p (def_stmt)
|| stmt_may_clobber_ref_p (def_stmt, ref))
|| stmt_may_clobber_ref_p_1 (def_stmt, ref))
return false;
vuse = gimple_vuse (def_stmt);
}
@ -1038,7 +1127,7 @@ maybe_skip_until (gimple phi, tree target, tree ref, tree vuse, bitmap *visited)
be found. */
static tree
get_continuation_for_phi (gimple phi, tree ref, bitmap *visited)
get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
{
unsigned nargs = gimple_phi_num_args (phi);
@ -1096,9 +1185,9 @@ get_continuation_for_phi (gimple phi, tree ref, bitmap *visited)
TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
void *
walk_non_aliased_vuses (tree ref, tree vuse,
void *(*walker)(tree, tree, void *),
void *(*translate)(tree *, tree, void *),void *data)
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
void *(*walker)(ao_ref *, tree, void *),
void *(*translate)(ao_ref *, tree, void *), void *data)
{
bitmap visited = NULL;
void *res;
@ -1121,11 +1210,11 @@ walk_non_aliased_vuses (tree ref, tree vuse,
vuse = get_continuation_for_phi (def_stmt, ref, &visited);
else
{
if (stmt_may_clobber_ref_p (def_stmt, ref))
if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
{
if (!translate)
break;
res = (*translate) (&ref, vuse, data);
res = (*translate) (ref, vuse, data);
/* Failed lookup and translation. */
if (res == (void *)-1)
{

View File

@ -74,7 +74,39 @@ struct GTY(()) pt_solution
};
/* Simplified and cached information about a memory reference tree.
Used by the alias-oracle internally and externally in alternate
interfaces. */
typedef struct ao_ref_s
{
/* The original full memory reference tree or NULL_TREE if that is
not available. */
tree ref;
/* The following fields are the decomposed reference as returned
by get_ref_base_and_extent. */
/* The base object of the memory reference or NULL_TREE if all of
the following fields are not yet computed. */
tree base;
/* The offset relative to the base. */
HOST_WIDE_INT offset;
/* The size of the access. */
HOST_WIDE_INT size;
/* The maximum possible extent of the access or -1 if unconstrained. */
HOST_WIDE_INT max_size;
/* The alias set of the access or -1 if not yet computed. */
alias_set_type ref_alias_set;
/* The alias set of the base object or -1 if not yet computed. */
alias_set_type base_alias_set;
} ao_ref;
/* In tree-ssa-alias.c */
extern void ao_ref_init (ao_ref *, tree);
extern tree ao_ref_base (ao_ref *);
extern alias_set_type ao_ref_alias_set (ao_ref *);
extern enum escape_type is_escape_site (gimple);
extern bool ptr_deref_may_alias_global_p (tree);
extern bool refs_may_alias_p (tree, tree);
@ -82,9 +114,10 @@ extern bool refs_anti_dependent_p (tree, tree);
extern bool refs_output_dependent_p (tree, tree);
extern bool ref_maybe_used_by_stmt_p (gimple, tree);
extern bool stmt_may_clobber_ref_p (gimple, tree);
extern void *walk_non_aliased_vuses (tree, tree,
void *(*)(tree, tree, void *),
void *(*)(tree *, tree, void *), void *);
extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
extern void *walk_non_aliased_vuses (ao_ref *, tree,
void *(*)(ao_ref *, tree, void *),
void *(*)(ao_ref *, tree, void *), void *);
extern unsigned int walk_aliased_vdefs (tree, tree,
bool (*)(tree, tree, void *), void *,
bitmap *);

View File

@ -1252,12 +1252,12 @@ do_unary:
static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
tree vuse,
alias_set_type set, tree type, tree vuse,
basic_block phiblock,
basic_block block)
{
gimple phi = SSA_NAME_DEF_STMT (vuse);
tree ref;
ao_ref ref;
if (gimple_bb (phi) != phiblock)
return vuse;
@ -1268,13 +1268,13 @@ translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
return PHI_ARG_DEF (phi, e->dest_idx);
}
if (!(ref = get_ref_from_reference_ops (operands)))
if (!ao_ref_init_from_vn_reference (&ref, set, type, operands))
return NULL_TREE;
/* Use the alias-oracle to find either the PHI node in this block,
the first VUSE used in this block that is equivalent to vuse or
the first VUSE which definition in this block kills the value. */
while (!stmt_may_clobber_ref_p (phi, ref))
while (!stmt_may_clobber_ref_p_1 (phi, &ref))
{
vuse = gimple_vuse (phi);
phi = SSA_NAME_DEF_STMT (vuse);
@ -1317,23 +1317,7 @@ get_expr_type (const pre_expr e)
case CONSTANT:
return TREE_TYPE (PRE_EXPR_CONSTANT (e));
case REFERENCE:
{
vn_reference_op_t vro;
gcc_assert (PRE_EXPR_REFERENCE (e)->operands);
vro = VEC_index (vn_reference_op_s,
PRE_EXPR_REFERENCE (e)->operands,
0);
/* We don't store type along with COMPONENT_REF because it is
always the same as FIELD_DECL's type. */
if (!vro->type)
{
gcc_assert (vro->opcode == COMPONENT_REF);
return TREE_TYPE (vro->op0);
}
return vro->type;
}
return PRE_EXPR_REFERENCE (e)->type;
case NARY:
return PRE_EXPR_NARY (e)->type;
}
@ -1661,6 +1645,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
if (vuse)
{
newvuse = translate_vuse_through_block (newoperands,
ref->set, ref->type,
vuse, phiblock, pred);
if (newvuse == NULL_TREE)
{
@ -1675,7 +1660,8 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
unsigned int new_val_id;
pre_expr constant;
tree result = vn_reference_lookup_pieces (newvuse,
tree result = vn_reference_lookup_pieces (newvuse, ref->set,
ref->type,
newoperands,
&newref, true);
if (newref)
@ -1706,7 +1692,8 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
new_val_id = get_next_value_id ();
VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
get_max_value_id() + 1);
newref = vn_reference_insert_pieces (newvuse,
newref = vn_reference_insert_pieces (newvuse, ref->set,
ref->type,
newoperands,
result, new_val_id);
newoperands = NULL;
@ -1884,10 +1871,10 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
gimple def;
tree ref = NULL_TREE;
gimple_stmt_iterator gsi;
unsigned id = get_expression_id (expr);
bool res = false;
ao_ref ref;
if (!vuse)
return false;
@ -1902,6 +1889,7 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
top of the basic block, a statement uses VUSE there can be no kill
inbetween that use and the original statement that loaded {e, VUSE},
so we can stop walking. */
ref.base = NULL_TREE;
for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
{
tree def_vuse, def_vdef;
@ -1924,16 +1912,15 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
}
/* Init ref only if we really need it. */
if (ref == NULL_TREE)
if (ref.base == NULL_TREE
&& !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
refx->operands))
{
if (!(ref = get_ref_from_reference_ops (refx->operands)))
{
res = true;
break;
}
res = true;
break;
}
/* If the statement may clobber expr, it dies. */
if (stmt_may_clobber_ref_p (def, ref))
if (stmt_may_clobber_ref_p_1 (def, &ref))
{
res = true;
break;
@ -3793,7 +3780,8 @@ compute_avail (void)
continue;
copy_reference_ops_from_call (stmt, &ops);
vn_reference_lookup_pieces (gimple_vuse (stmt),
vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
gimple_expr_type (stmt),
ops, &ref, false);
VEC_free (vn_reference_op_s, heap, ops);
if (!ref)

View File

@ -543,28 +543,35 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
a matching type is not necessary and a mismatching type
is always a spurious difference. */
temp.type = NULL_TREE;
temp.op0 = TREE_OPERAND (ref, 1);
temp.op1 = TREE_OPERAND (ref, 2);
/* If this is a reference to a union member, record the union
member size as operand. Do so only if we are doing
expression insertion (during FRE), as PRE currently gets
confused with this. */
if (may_insert
&& TREE_OPERAND (ref, 2) == NULL_TREE
&& TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
&& integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
&& integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
else
{
/* Record field as operand. */
temp.op0 = TREE_OPERAND (ref, 1);
temp.op1 = TREE_OPERAND (ref, 2);
}
&& temp.op1 == NULL_TREE
&& TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
&& integer_zerop (DECL_FIELD_OFFSET (temp.op0))
&& integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
&& host_integerp (TYPE_SIZE (TREE_TYPE (temp.op0)), 0))
temp.op0 = TYPE_SIZE (TREE_TYPE (temp.op0));
break;
case ARRAY_RANGE_REF:
case ARRAY_REF:
/* Record index as operand. */
temp.op0 = TREE_OPERAND (ref, 1);
temp.op1 = TREE_OPERAND (ref, 2);
/* Record even constant lower bounds. */
if (TREE_OPERAND (ref, 2))
temp.op1 = TREE_OPERAND (ref, 2);
else
{
tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
if (domain
&& TYPE_MIN_VALUE (domain)
&& !integer_zerop (TYPE_MIN_VALUE (domain)))
temp.op1 = TYPE_MIN_VALUE (domain);
}
temp.op2 = TREE_OPERAND (ref, 3);
break;
case STRING_CST:
@ -612,24 +619,68 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
}
}
/* Re-create a reference tree from the reference ops OPS.
Returns NULL_TREE if the ops were not handled.
This routine needs to be kept in sync with copy_reference_ops_from_ref. */
/* Build a alias-oracle reference abstraction in *REF from the vn_reference
operands in *OPS, the reference alias set SET and the reference type TYPE.
Return true if something useful was produced. */
tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, tree type,
VEC (vn_reference_op_s, heap) *ops)
{
vn_reference_op_t op;
unsigned i;
tree ref, *op0_p = &ref;
tree base = NULL_TREE;
tree *op0_p = &base;
HOST_WIDE_INT offset = 0;
HOST_WIDE_INT max_size;
HOST_WIDE_INT size = -1;
tree size_tree = NULL_TREE;
/* First get the final access size from just the outermost expression. */
op = VEC_index (vn_reference_op_s, ops, 0);
if (op->opcode == COMPONENT_REF)
{
if (TREE_CODE (op->op0) == INTEGER_CST)
size_tree = op->op0;
else
size_tree = DECL_SIZE (op->op0);
}
else if (op->opcode == BIT_FIELD_REF)
size_tree = op->op0;
else
{
enum machine_mode mode = TYPE_MODE (type);
if (mode == BLKmode)
size_tree = TYPE_SIZE (type);
else
size = GET_MODE_BITSIZE (mode);
}
if (size_tree != NULL_TREE)
{
if (!host_integerp (size_tree, 1))
size = -1;
else
size = TREE_INT_CST_LOW (size_tree);
}
/* Initially, maxsize is the same as the accessed element size.
In the following it will only grow (or become -1). */
max_size = size;
/* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
{
switch (op->opcode)
{
/* These may be in the reference ops, but we cannot do anything
sensible with them here. */
case CALL_EXPR:
return NULL_TREE;
case ADDR_EXPR:
return false;
/* Record the base objects. */
case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
*op0_p = build1 (op->opcode, op->type, NULL_TREE);
@ -642,26 +693,69 @@ get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
case SSA_NAME:
case FILTER_EXPR:
case EXC_PTR_EXPR:
*op0_p = op->op0;
break;
/* And now the usual component-reference style ops. */
case BIT_FIELD_REF:
*op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
op->op0, op->op1);
op0_p = &TREE_OPERAND (*op0_p, 0);
offset += tree_low_cst (op->op1, 0);
break;
case COMPONENT_REF:
/* We cannot re-construct our fancy union reference handling. */
if (TREE_CODE (op->op0) == INTEGER_CST)
return NULL_TREE;
*op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
op->op0, op->op1);
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
{
tree field = op->op0;
/* We do not have a complete COMPONENT_REF tree here so we
cannot use component_ref_field_offset. Do the interesting
parts manually. */
/* Our union trick, done for offset zero only. */
if (TREE_CODE (field) == INTEGER_CST)
;
else if (op->op1
|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
max_size = -1;
else
{
offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
* BITS_PER_UNIT);
offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
}
break;
}
case ARRAY_RANGE_REF:
case ARRAY_REF:
*op0_p = build4 (op->opcode, op->type, NULL_TREE,
op->op0, op->op1, op->op2);
op0_p = &TREE_OPERAND (*op0_p, 0);
/* Same for ARRAY_REFs. We do not have access to the array
type here, but we recorded the lower bound in op1. */
if (op->op2
|| !host_integerp (op->op0, 0)
|| (op->op1 && !host_integerp (op->op1, 0))
|| !host_integerp (TYPE_SIZE (op->type), 1))
max_size = -1;
else
{
HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
if (op->op1)
hindex -= TREE_INT_CST_LOW (op->op1);
hindex *= TREE_INT_CST_LOW (TYPE_SIZE (op->type));
offset += hindex;
}
break;
case REALPART_EXPR:
break;
case IMAGPART_EXPR:
offset += size;
break;
case VIEW_CONVERT_EXPR:
break;
case STRING_CST:
@ -670,37 +764,26 @@ get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
case VECTOR_CST:
case REAL_CST:
case CONSTRUCTOR:
case VAR_DECL:
case PARM_DECL:
case CONST_DECL:
case RESULT_DECL:
case SSA_NAME:
case FILTER_EXPR:
case EXC_PTR_EXPR:
*op0_p = op->op0;
break;
case ADDR_EXPR:
if (op->op0 != NULL_TREE)
{
gcc_assert (is_gimple_min_invariant (op->op0));
*op0_p = op->op0;
break;
}
/* Fallthrough. */
case IMAGPART_EXPR:
case REALPART_EXPR:
case VIEW_CONVERT_EXPR:
*op0_p = build1 (op->opcode, op->type, NULL_TREE);
op0_p = &TREE_OPERAND (*op0_p, 0);
break;
return false;
default:
return NULL_TREE;
return false;
}
}
return ref;
if (base == NULL_TREE)
return false;
ref->ref = NULL_TREE;
ref->base = base;
ref->offset = offset;
ref->size = size;
ref->max_size = max_size;
ref->ref_alias_set = set;
ref->base_alias_set = -1;
return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
@ -920,7 +1003,7 @@ vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
with the current VUSE and performs the expression lookup. */
static void *
vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
vn_reference_t vr = (vn_reference_t)vr_;
void **slot;
@ -949,16 +1032,18 @@ vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
of VUSE. */
static void *
vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
vn_reference_t vr = (vn_reference_t)vr_;
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
tree fndecl;
tree ref = *refp;
tree base;
HOST_WIDE_INT offset, size, maxsize;
base = get_ref_base_and_extent (ref, &offset, &size, &maxsize);
base = ao_ref_base (ref);
offset = ref->offset;
size = ref->size;
maxsize = ref->max_size;
/* If we cannot constrain the size of the reference we cannot
test if anything kills it. */
@ -968,7 +1053,7 @@ vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
/* def_stmt may-defs *ref. See if we can derive a value for *ref
from that defintion.
1) Memset. */
if (is_gimple_reg_type (TREE_TYPE (ref))
if (is_gimple_reg_type (vr->type)
&& is_gimple_call (def_stmt)
&& (fndecl = gimple_call_fndecl (def_stmt))
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
@ -987,13 +1072,18 @@ vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
&& operand_equal_p (base, base2, 0)
&& offset2 <= offset
&& offset2 + size2 >= offset + maxsize)
return vn_reference_insert (ref,
fold_convert (TREE_TYPE (ref),
integer_zero_node), vuse);
{
tree val = fold_convert (vr->type, integer_zero_node);
unsigned int value_id = get_or_alloc_constant_value_id (val);
return vn_reference_insert_pieces (vuse, vr->set, vr->type,
VEC_copy (vn_reference_op_s,
heap, vr->operands),
val, value_id);
}
}
/* 2) Assignment from an empty CONSTRUCTOR. */
else if (is_gimple_reg_type (TREE_TYPE (ref))
else if (is_gimple_reg_type (vr->type)
&& gimple_assign_single_p (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
&& CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
@ -1005,9 +1095,14 @@ vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
if (operand_equal_p (base, base2, 0)
&& offset2 <= offset
&& offset2 + size2 >= offset + maxsize)
return vn_reference_insert (ref,
fold_convert (TREE_TYPE (ref),
integer_zero_node), vuse);
{
tree val = fold_convert (vr->type, integer_zero_node);
unsigned int value_id = get_or_alloc_constant_value_id (val);
return vn_reference_insert_pieces (vuse, vr->set, vr->type,
VEC_copy (vn_reference_op_s,
heap, vr->operands),
val, value_id);
}
}
/* For aggregate copies translate the reference through them if
@ -1022,6 +1117,7 @@ vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
int i, j;
VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
vn_reference_op_t vro;
ao_ref r;
/* See if the assignment kills REF. */
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
@ -1071,9 +1167,12 @@ vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
VEC_free (vn_reference_op_s, heap, rhs);
vr->hashcode = vn_reference_compute_hash (vr);
*refp = get_ref_from_reference_ops (vr->operands);
if (!*refp)
/* Adjust *ref from the new operands. */
if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
return (void *)-1;
gcc_assert (ref->size == r.size);
*ref = r;
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
@ -1089,7 +1188,7 @@ vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
vn_reference_t stored in the hashtable if something is found. */
tree
vn_reference_lookup_pieces (tree vuse,
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
VEC (vn_reference_op_s, heap) *operands,
vn_reference_t *vnresult, bool maywalk)
{
@ -1110,6 +1209,8 @@ vn_reference_lookup_pieces (tree vuse,
* VEC_length (vn_reference_op_s, operands));
vr1.operands = operands = shared_lookup_references
= valueize_refs (shared_lookup_references);
vr1.type = type;
vr1.set = set;
vr1.hashcode = vn_reference_compute_hash (&vr1);
vn_reference_lookup_1 (&vr1, vnresult);
@ -1117,10 +1218,10 @@ vn_reference_lookup_pieces (tree vuse,
&& maywalk
&& vr1.vuse)
{
tree ref = get_ref_from_reference_ops (operands);
if (ref)
ao_ref r;
if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
*vnresult =
(vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
@ -1151,14 +1252,18 @@ vn_reference_lookup (tree op, tree vuse, bool maywalk,
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
vr1.type = TREE_TYPE (op);
vr1.set = get_alias_set (op);
vr1.hashcode = vn_reference_compute_hash (&vr1);
if (maywalk
&& vr1.vuse)
{
vn_reference_t wvnresult;
ao_ref r;
ao_ref_init (&r, op);
wvnresult =
(vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
@ -1193,6 +1298,8 @@ vn_reference_insert (tree op, tree result, tree vuse)
vr1->value_id = get_or_alloc_constant_value_id (result);
vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
vr1->type = TREE_TYPE (op);
vr1->set = get_alias_set (op);
vr1->hashcode = vn_reference_compute_hash (vr1);
vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
@ -1220,7 +1327,7 @@ vn_reference_insert (tree op, tree result, tree vuse)
structure we created. */
vn_reference_t
vn_reference_insert_pieces (tree vuse,
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
VEC (vn_reference_op_s, heap) *operands,
tree result, unsigned int value_id)
@ -1232,6 +1339,8 @@ vn_reference_insert_pieces (tree vuse,
vr1->value_id = value_id;
vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1->operands = valueize_refs (operands);
vr1->type = type;
vr1->set = set;
vr1->hashcode = vn_reference_compute_hash (vr1);
if (result && TREE_CODE (result) == SSA_NAME)
result = SSA_VAL (result);
@ -1825,6 +1934,8 @@ visit_reference_op_call (tree lhs, gimple stmt)
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1.operands = valueize_shared_reference_ops_from_call (stmt);
vr1.type = gimple_expr_type (stmt);
vr1.set = 0;
vr1.hashcode = vn_reference_compute_hash (&vr1);
result = vn_reference_lookup_1 (&vr1, NULL);
if (result)
@ -1842,6 +1953,8 @@ visit_reference_op_call (tree lhs, gimple stmt)
vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
vr2->vuse = vr1.vuse;
vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
vr2->type = vr1.type;
vr2->set = vr1.set;
vr2->hashcode = vr1.hashcode;
vr2->result = lhs;
slot = htab_find_slot_with_hash (current_info->references,

View File

@ -92,6 +92,8 @@ typedef struct vn_reference_s
unsigned int value_id;
hashval_t hashcode;
tree vuse;
alias_set_type set;
tree type;
VEC (vn_reference_op_s, heap) *operands;
tree result;
} *vn_reference_t;
@ -177,13 +179,14 @@ void vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **,
unsigned int *);
void copy_reference_ops_from_ref (tree, VEC(vn_reference_op_s, heap) **);
void copy_reference_ops_from_call (gimple, VEC(vn_reference_op_s, heap) **);
tree get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops);
tree vn_reference_lookup_pieces (tree,
bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree,
VEC (vn_reference_op_s, heap) *);
tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
VEC (vn_reference_op_s, heap) *,
vn_reference_t *, bool);
tree vn_reference_lookup (tree, tree, bool, vn_reference_t *);
vn_reference_t vn_reference_insert (tree, tree, tree);
vn_reference_t vn_reference_insert_pieces (tree,
vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
VEC (vn_reference_op_s, heap) *,
tree, unsigned int);