re PR tree-optimization/36327 (SCCVN should look through struct copies)

2009-05-25  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/36327
	* tree-ssa-alias.c (walk_non_aliased_vuses): Add second walker
	callback for reference translation or lookup at the point
	of may-defs.
	* tree-ssa-alias.h (walk_non_aliased_vuses): Adjust prototype.
	* tree-ssa-sccvn.c (get_ref_from_reference_ops): Bail out
	for union COMPONENT_REFs.
	(vn_reference_lookup_3): New callback.  Lookup from memset
	and CONSTRUCTOR assignment, translate through struct copies.
	(vn_reference_lookup_pieces): Make sure to not free the
	passed operands array.  Adjust walk_non_aliased_vuses call.
	(vn_reference_lookup): Adjust walk_non_aliased_vuses call,
	make sure we do not leak memory.

	* gcc.dg/tree-ssa/ssa-fre-24.c: New testcase.
	* gcc.dg/tree-ssa/ssa-fre-25.c: Likewise.
	* gcc.dg/tree-ssa/sra-2.c: Disable FRE.
	* gcc.dg/vect/no-vfa-vect-43.c: Adjust.
	* gcc.dg/vect/vect-40.c: Likewise.
	* gcc.dg/vect/vect-42.c: Likewise.
	* gcc.dg/vect/vect-46.c: Likewise.
	* gcc.dg/vect/vect-76.c: Likewise.

From-SVN: r147851
Commit 01df5c8ae2 (parent c74b74a8b2) by Richard Guenther, 2009-05-25 15:18:21 +00:00, committed by Richard Biener.
13 changed files with 328 additions and 39 deletions.
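What the change buys in practice: value numbering can now look through an
intervening aggregate copy, zeroing memset, or empty-CONSTRUCTOR assignment
instead of treating it as an opaque clobber. A minimal illustration,
mirroring the new ssa-fre-25.c testcase below (expected FRE result in the
comment):

    struct X { int i; int j; };

    int foo (struct X *p)
    {
      struct X x;
      p->i = 1;
      x = *p;             /* aggregate copy; reads of x.i translate to p->i */
      x.j = 2;            /* disjoint field, does not clobber x.i */
      return p->i - x.i;  /* FRE can now fold this to 0 */
    }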

gcc/ChangeLog

@@ -1,3 +1,19 @@
2009-05-25  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/36327
	* tree-ssa-alias.c (walk_non_aliased_vuses): Add second walker
	callback for reference translation or lookup at the point
	of may-defs.
	* tree-ssa-alias.h (walk_non_aliased_vuses): Adjust prototype.
	* tree-ssa-sccvn.c (get_ref_from_reference_ops): Bail out
	for union COMPONENT_REFs.
	(vn_reference_lookup_3): New callback.  Lookup from memset
	and CONSTRUCTOR assignment, translate through struct copies.
	(vn_reference_lookup_pieces): Make sure to not free the
	passed operands array.  Adjust walk_non_aliased_vuses call.
	(vn_reference_lookup): Adjust walk_non_aliased_vuses call,
	make sure we do not leak memory.

2009-05-25  Richard Guenther  <rguenther@suse.de>

	* tree-ssa-alias.h (dump_points_to_solution): Declare.

gcc/testsuite/ChangeLog

@@ -1,3 +1,15 @@
2009-05-25  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/36327
	* gcc.dg/tree-ssa/ssa-fre-24.c: New testcase.
	* gcc.dg/tree-ssa/ssa-fre-25.c: Likewise.
	* gcc.dg/tree-ssa/sra-2.c: Disable FRE.
	* gcc.dg/vect/no-vfa-vect-43.c: Adjust.
	* gcc.dg/vect/vect-40.c: Likewise.
	* gcc.dg/vect/vect-42.c: Likewise.
	* gcc.dg/vect/vect-46.c: Likewise.
	* gcc.dg/vect/vect-76.c: Likewise.

2009-05-25  Janus Weil  <janus@gcc.gnu.org>

	PR fortran/40176

gcc/testsuite/gcc.dg/tree-ssa/sra-2.c

@@ -1,5 +1,5 @@
/* { dg-do compile } */
/* { dg-options "-O1 -fdump-tree-optimized --param sra-max-structure-size=32" } */
/* { dg-options "-O1 -fno-tree-fre -fdump-tree-optimized --param sra-max-structure-size=32" } */
/* Test for SRA. */

gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-24.c

@@ -0,0 +1,34 @@
/* { dg-do compile } */
/* { dg-options "-O -fno-tree-sra -fdump-tree-fre" } */
int foo(void)
{
int a[16] = {};
return a[3];
}
int bar(void)
{
int a[16];
__builtin_memset (a, 0, sizeof(a));
return a[3];
}
struct X { int i; };
int baz(void)
{
struct X a,b;
a.i = 0;
b = a;
return b.i;
}
int bazzoo (void)
{
struct X b, a = {};
b = a;
return b.i;
}
/* { dg-final { scan-tree-dump-times "= 0;" 5 "fre" } } */
/* { dg-final { cleanup-tree-dump "fre" } } */
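Each function above should reduce to returning a constant zero. Sketching
bar () after FRE (a hand-written expectation, not actual dump output; the
now-dead memset would be removed later, e.g. by DSE):

    int bar (void)
    {
      int a[16];
      __builtin_memset (a, 0, sizeof (a));
      return 0;  /* a[3] value-numbered to 0 through the memset */
    }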

gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-25.c

@@ -0,0 +1,18 @@
/* { dg-do compile } */
/* { dg-options "-O -fno-tree-sra -fdump-tree-fre" } */
struct X { int i; int j; };
void bar (struct X *);
int foo (struct X *p)
{
struct X x;
p->i = 1;
x = *p;
x.j = 2;
return p->i - x.i;
}
/* We should optimize this to return 0. */
/* { dg-final { scan-tree-dump "= 0;" "fre" } } */
/* { dg-final { cleanup-tree-dump "fre" } } */
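For this testcase the walk proceeds roughly as follows (a hand-written
sketch of the GIMPLE-level reasoning, not actual dump output):

    p->i = 1;
    x = *p;         /* clobbers x.i: the translate callback rewrites the
                       lookup of x.i into a lookup of p->i and continues */
    x.j = 2;        /* disjoint field: clobbers neither x.i nor p->i */
    _1 = p->i;      /* value-numbered to 1 via the store above */
    _2 = x.i;       /* translated through the copy, also 1 */
    return _1 - _2; /* both operands have the same value: folds to 0 */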

gcc/testsuite/gcc.dg/vect/no-vfa-vect-43.c

@@ -22,41 +22,53 @@ void bar (float *pa, float *pb, float *pc)
__attribute__ ((noinline)) int
main1 (float *pa)
main1 (float *pa, float *pb, float *pc)
{
int i;
float pb[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float pc[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
for (i = 0; i < N; i++)
{
b[i] = pb[i];
c[i] = pc[i];
}
/* Vectorizable: pa may not alias pb and/or pc, even though their
addresses escape. &pa would need to escape to point to escaped memory. */
for (i = 0; i < N; i++)
{
pa[i] = pb[i] * pc[i];
pa[i] = b[i] * c[i];
}
bar (pa,pb,pc);
bar (pa,b,c);
return 0;
}
__attribute__ ((noinline)) int
main2 (float * pa)
main2 (float *pa, float *pb, float *pc)
{
int i;
float pb[N] = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float pc[N] = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
for (i = 0; i < N; i++)
{
b[i] = pb[i];
c[i] = pc[i];
}
/* Vectorizable: pb and pc addresses do not escape. */
for (i = 0; i < N; i++)
{
pa[i] = pb[i] * pc[i];
pa[i] = b[i] * c[i];
}
/* check results: */
for (i = 0; i < N; i++)
{
if (pa[i] != (pb[i] * pc[i]))
if (pa[i] != (b[i] * c[i]))
abort ();
}
@@ -67,14 +79,16 @@ int main (void)
{
int i;
float a[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
float b[N] = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float c[N] = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
check_vect ();
main1 (a);
main2 (a);
main1 (a,b,c);
main2 (a,b,c);
return 0;
}
/* { dg-final { scan-tree-dump-times "vectorized 1 loops" 2 "vect" } } */
/* { dg-final { scan-tree-dump-times "vectorized 2 loops" 2 "vect" } } */
/* { dg-final { scan-tree-dump-times "Alignment of access forced using versioning" 2 "vect" { target vect_no_align } } } */
/* { dg-final { cleanup-tree-dump "vect" } } */

gcc/testsuite/gcc.dg/vect/vect-40.c

@@ -26,13 +26,16 @@ void bar (float *pa, float *pb, float *pc)
vect-46.c is similar to this one with one difference:
the loop bound is unknown. */
float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)))
= {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)))
= {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
__attribute__ ((noinline)) int
main1 ()
{
int i;
float a[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
float *pa = a;
float *pb = b;
float *pc = c;

gcc/testsuite/gcc.dg/vect/vect-42.c

@@ -27,15 +27,22 @@ void bar (float *pa, float *pb, float *pc)
No aliasing problems. */
__attribute__ ((noinline)) int
main1 (float * __restrict__ pa)
main1 (float * __restrict__ pa, float *pb, float *pc)
{
float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
int i;
float pb[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float pc[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
/* We also vectorize this loop. */
for (i = 0; i < N; i++)
{
b[i] = pb[i];
c[i] = pc[i];
}
for (i = 0; i < N; i++)
{
pa[i] = pb[i] * pc[i];
pa[i] = b[i] * c[i];
}
return 0;
@@ -45,18 +52,18 @@ int main (void)
{
int i;
float a[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
float b[N] = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float c[N] = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
check_vect ();
main1 (a);
main1 (a,b,c);
bar (a,b,c);
return 0;
}
/* { dg-final { scan-tree-dump-times "vectorized 1 loops" 1 "vect" } } */
/* { dg-final { scan-tree-dump-times "vectorized 2 loops" 1 "vect" } } */
/* { dg-final { scan-tree-dump-times "Alignment of access forced using versioning" 1 "vect" { target { vect_no_align || { ! vector_alignment_reachable } } } } } */
/* { dg-final { scan-tree-dump-times "Vectorizing an unaligned access" 2 "vect" { xfail { vect_no_align || { ! vector_alignment_reachable } } } } } */
/* { dg-final { scan-tree-dump-times "Vectorizing an unaligned access" 4 "vect" { xfail { vect_no_align || { ! vector_alignment_reachable } } } } } */
/* { dg-final { scan-tree-dump-times "Alignment of access forced using peeling" 1 "vect" { xfail {vect_no_align || { ! vector_alignment_reachable } } } } } */
/* { dg-final { cleanup-tree-dump "vect" } } */

gcc/testsuite/gcc.dg/vect/vect-46.c

@@ -26,11 +26,16 @@ void bar (float *pa, float *pb, float *pc)
vect-40.c is similar to this one with one difference:
the loop bound is known. */
float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)))
= {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)))
= {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
__attribute__ ((noinline)) int
main1 (int n)
{
int i;
float a[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))); float b[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57}; float c[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
float a[N] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__)));
float *pa = a;
float *pb = b;
float *pc = c;

gcc/testsuite/gcc.dg/vect/vect-76.c

@@ -11,13 +11,13 @@
more involved than just an ssa_name. */
int ib[N+OFF] __attribute__ ((__aligned__(__BIGGEST_ALIGNMENT__))) = {0, 1, 3, 5, 7, 11, 13, 17, 0, 2, 6, 10};
int ic[N+OFF] = {0, 1, 3, 5, 7, 11, 13, 17, 0, 2, 6, 10};
__attribute__ ((noinline))
int main1 (int *pib)
{
int i;
int ia[N+OFF];
int ic[N+OFF] = {0, 1, 3, 5, 7, 11, 13, 17, 0, 2, 6, 10};
for (i = OFF; i < N; i++)
{

gcc/tree-ssa-alias.c

@@ -1086,11 +1086,19 @@ get_continuation_for_phi (gimple phi, tree ref, bitmap *visited)
WALKER returns non-NULL the walk stops and its result is returned.
At the end of a non-successful walk NULL is returned.
TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
use whose definition is a statement that may clobber REF, and DATA.
If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
If TRANSLATE returns non-NULL the walk stops and its result is returned.
If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
to adjust REF and *DATA to make that valid.
TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
void *
walk_non_aliased_vuses (tree ref, tree vuse,
void *(*walker)(tree, tree, void *), void *data)
void *(*walker)(tree, tree, void *),
void *(*translate)(tree *, tree, void *), void *data)
{
bitmap visited = NULL;
void *res;
@@ -1114,7 +1122,21 @@ walk_non_aliased_vuses (tree ref, tree vuse,
else
{
if (stmt_may_clobber_ref_p (def_stmt, ref))
break;
{
if (!translate)
break;
res = (*translate) (&ref, vuse, data);
/* Failed lookup and translation. */
if (res == (void *)-1)
{
res = NULL;
break;
}
/* Lookup succeeded. */
else if (res != NULL)
break;
/* Translation succeeded, continue walking. */
}
vuse = gimple_vuse (def_stmt);
}
}
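The return-value protocol of the new TRANSLATE callback is the heart of the
change. A self-contained toy analogue of the loop above in plain C (invented
types, not GCC internals):

    #include <stddef.h>

    struct def { struct def *prev; int clobbers; };

    /* Walk a chain of definitions from newest to oldest.  WALKER may
       find a cached value; at a clobbering definition, TRANSLATE may
       find a value, give up, or rewrite *REF so the walk continues.  */
    static void *
    walk_defs (int *ref, struct def *d,
               void *(*walker) (int, struct def *, void *),
               void *(*translate) (int *, struct def *, void *),
               void *data)
    {
      while (d)
        {
          void *res = walker (*ref, d, data);
          if (res != NULL)
            return res;                 /* lookup succeeded */
          if (d->clobbers)
            {
              if (!translate)
                return NULL;            /* clobbered and no translation */
              res = translate (ref, d, data);
              if (res == (void *) -1)
                return NULL;            /* lookup and translation failed */
              if (res != NULL)
                return res;             /* translation found the value */
              /* Translation succeeded: continue with the adjusted *ref.  */
            }
          d = d->prev;
        }
      return NULL;
    }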

gcc/tree-ssa-alias.h

@@ -83,7 +83,8 @@ extern bool refs_output_dependent_p (tree, tree);
extern bool ref_maybe_used_by_stmt_p (gimple, tree);
extern bool stmt_may_clobber_ref_p (gimple, tree);
extern void *walk_non_aliased_vuses (tree, tree,
void *(*)(tree, tree, void *), void *);
void *(*)(tree, tree, void *),
void *(*)(tree *, tree, void *), void *);
extern unsigned int walk_aliased_vdefs (tree, tree,
bool (*)(tree, tree, void *), void *,
bitmap *);

gcc/tree-ssa-sccvn.c

@@ -649,6 +649,9 @@ get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
break;
case COMPONENT_REF:
/* We cannot re-construct our fancy union reference handling. */
if (TREE_CODE (op->op0) == INTEGER_CST)
return NULL_TREE;
*op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
op->op0, op->op1);
op0_p = &TREE_OPERAND (*op0_p, 0);
@@ -940,6 +943,146 @@ vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
return NULL;
}
/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
from the statement defining VUSE and if not successful tries to
translate *REFP and VR_ through an aggregate copy at the definition
of VUSE. */
static void *
vn_reference_lookup_3 (tree *refp, tree vuse, void *vr_)
{
vn_reference_t vr = (vn_reference_t)vr_;
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
tree fndecl;
tree ref = *refp;
tree base;
HOST_WIDE_INT offset, size, maxsize;
base = get_ref_base_and_extent (ref, &offset, &size, &maxsize);
/* If we cannot constrain the size of the reference we cannot
test if anything kills it. */
if (maxsize == -1)
return (void *)-1;
/* def_stmt may-defs *ref. See if we can derive a value for *ref
from that definition.
1) Memset. */
if (is_gimple_reg_type (TREE_TYPE (ref))
&& is_gimple_call (def_stmt)
&& (fndecl = gimple_call_fndecl (def_stmt))
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
&& integer_zerop (gimple_call_arg (def_stmt, 1))
&& host_integerp (gimple_call_arg (def_stmt, 2), 1)
&& TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
{
tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
if ((unsigned HOST_WIDE_INT)size2 / 8
== TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
&& operand_equal_p (base, base2, 0)
&& offset2 <= offset
&& offset2 + size2 >= offset + maxsize)
return vn_reference_insert (ref,
fold_convert (TREE_TYPE (ref),
integer_zero_node), vuse);
}
/* 2) Assignment from an empty CONSTRUCTOR. */
else if (is_gimple_reg_type (TREE_TYPE (ref))
&& gimple_assign_single_p (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
&& CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
{
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
&offset2, &size2, &maxsize2);
if (operand_equal_p (base, base2, 0)
&& offset2 <= offset
&& offset2 + size2 >= offset + maxsize)
return vn_reference_insert (ref,
fold_convert (TREE_TYPE (ref),
integer_zero_node), vuse);
}
/* For aggregate copies translate the reference through them if
the copy kills ref. */
else if (gimple_assign_single_p (def_stmt)
&& (DECL_P (gimple_assign_rhs1 (def_stmt))
|| INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
|| handled_component_p (gimple_assign_rhs1 (def_stmt))))
{
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
int i, j;
VEC (vn_reference_op_s, heap) *lhs = NULL, *rhs = NULL;
vn_reference_op_t vro;
/* See if the assignment kills REF. */
base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
&offset2, &size2, &maxsize2);
if (!operand_equal_p (base, base2, 0)
|| offset2 > offset
|| offset2 + size2 < offset + maxsize)
return (void *)-1;
/* Find the common base of ref and the lhs. */
copy_reference_ops_from_ref (gimple_assign_lhs (def_stmt), &lhs);
i = VEC_length (vn_reference_op_s, vr->operands) - 1;
j = VEC_length (vn_reference_op_s, lhs) - 1;
while (j >= 0
&& vn_reference_op_eq (VEC_index (vn_reference_op_s,
vr->operands, i),
VEC_index (vn_reference_op_s, lhs, j)))
{
i--;
j--;
}
/* i now points to the first additional op.
??? LHS may not be completely contained in VR, one or more
VIEW_CONVERT_EXPRs could be in its way. We could at least
try handling outermost VIEW_CONVERT_EXPRs. */
if (j != -1)
return (void *)-1;
VEC_free (vn_reference_op_s, heap, lhs);
/* Now re-write REF to be based on the rhs of the assignment. */
copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
/* We need to pre-pend vr->operands[0..i] to rhs. */
if (i + 1 + VEC_length (vn_reference_op_s, rhs)
> VEC_length (vn_reference_op_s, vr->operands))
{
VEC (vn_reference_op_s, heap) *old = vr->operands;
VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
i + 1 + VEC_length (vn_reference_op_s, rhs));
if (old == shared_lookup_references
&& vr->operands != old)
shared_lookup_references = NULL;
}
else
VEC_truncate (vn_reference_op_s, vr->operands,
i + 1 + VEC_length (vn_reference_op_s, rhs));
for (j = 0; VEC_iterate (vn_reference_op_s, rhs, j, vro); ++j)
VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
VEC_free (vn_reference_op_s, heap, rhs);
vr->hashcode = vn_reference_compute_hash (vr);
*refp = get_ref_from_reference_ops (vr->operands);
if (!*refp)
return (void *)-1;
/* Keep looking for the adjusted *REF / VR pair. */
return NULL;
}
/* Bail out and stop walking. */
return (void *)-1;
}
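/* A worked example of the splice above (illustrative; the operand
   encoding is simplified): looking up x.i across the copy x = *p.
     vr->operands for x.i:  [COMPONENT_REF .i] [VAR_DECL x]
     lhs ops for x:                            [VAR_DECL x]
   The tail-matching loop stops with j == -1 and i naming the
   COMPONENT_REF.  Pre-pending vr->operands[0..i] to the rhs ops for *p,
     [INDIRECT_REF] [SSA_NAME p],
   yields [COMPONENT_REF .i] [INDIRECT_REF] [SSA_NAME p], i.e. p->i,
   and the walk continues looking up that reference.  */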
/* Lookup a reference operation by its parts, in the current hash table.
Returns the resulting value number if it exists in the hash table,
NULL_TREE otherwise. VNRESULT will be filled in with the actual
@@ -956,9 +1099,17 @@ vn_reference_lookup_pieces (tree vuse,
if (!vnresult)
vnresult = &tmp;
*vnresult = NULL;
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1.operands = valueize_refs (operands);
VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
VEC_length (vn_reference_op_s, operands));
memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
VEC_address (vn_reference_op_s, operands),
sizeof (vn_reference_op_s)
* VEC_length (vn_reference_op_s, operands));
vr1.operands = operands = shared_lookup_references
= valueize_refs (shared_lookup_references);
vr1.hashcode = vn_reference_compute_hash (&vr1);
vn_reference_lookup_1 (&vr1, vnresult);
@@ -967,11 +1118,13 @@
&& vr1.vuse)
{
tree ref = get_ref_from_reference_ops (operands);
if (!ref)
return NULL_TREE;
*vnresult =
(vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
vn_reference_lookup_2, &vr1);
if (ref)
*vnresult =
(vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
VEC_free (vn_reference_op_s, heap, vr1.operands);
}
if (*vnresult)
@@ -990,13 +1143,14 @@ tree
vn_reference_lookup (tree op, tree vuse, bool maywalk,
vn_reference_t *vnresult)
{
VEC (vn_reference_op_s, heap) *operands;
struct vn_reference_s vr1;
if (vnresult)
*vnresult = NULL;
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
vr1.operands = valueize_shared_reference_ops_from_ref (op);
vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
vr1.hashcode = vn_reference_compute_hash (&vr1);
if (maywalk
@@ -1005,7 +1159,10 @@ vn_reference_lookup (tree op, tree vuse, bool maywalk,
vn_reference_t wvnresult;
wvnresult =
(vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
vn_reference_lookup_2, &vr1);
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
VEC_free (vn_reference_op_s, heap, vr1.operands);
if (wvnresult)
{
if (vnresult)