Fix availability compute during VN DOM elimination

This fixes an issue with redundant store elimination in FRE/PRE
which, when invoked by the DOM elimination walk, ends up using
possibly stale availability data from the RPO walk.  It also
fixes a missed optimization during valueization of addresses by
making sure to use get_addr_base_and_unit_offset_1, which can
valueize SSA operands, and by adjusting that function to also
valueize an ARRAY_REF's low bound.
2020-05-08  Richard Biener  <rguenther@suse.de>

	* tree-ssa-sccvn.c (rpo_avail): Change type to
	eliminate_dom_walker *.
	(eliminate_with_rpo_vn): Adjust rpo_avail to make vn_valueize
	use the DOM walker availability.
	(vn_reference_fold_indirect): Use get_addr_base_and_unit_offset_1
	with vn_valueize as valueization callback.
	(vn_reference_maybe_forwprop_address): Likewise.
	* tree-dfa.c (get_addr_base_and_unit_offset_1): Also valueize
	array_ref_low_bound.

	* gnat.dg/opt83.adb: New testcase.
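
A minimal standalone sketch of the availability problem described above,
with invented names (current_avail, lookup_leader) standing in for
rpo_avail and vn_valueize: a valueize-style lookup reads whichever
availability table a global pointer currently refers to, so a DOM-order
elimination walk that does not install its own table keeps answering from
data left behind by the earlier RPO-order walk.  The eliminate_with_rpo_vn
hunk at the end of the diff applies the corresponding save/install/restore.

// Sketch only, not GCC code: a global pointer selects which walk's
// availability table a valueize-style lookup consults.
#include <iostream>
#include <map>
#include <string>

using avail_map = std::map<std::string, std::string>;

static const avail_map *current_avail = nullptr;   // analogue of rpo_avail

// Analogue of vn_valueize: map a name to its available leader, if any.
static std::string
lookup_leader (const std::string &name)
{
  if (current_avail)
    {
      auto it = current_avail->find (name);
      if (it != current_avail->end ())
        return it->second;
    }
  return name;   // nothing better known, keep the name itself
}

int main ()
{
  avail_map rpo_walk_avail = { { "_2", "_8" } };   // left over from the RPO walk
  avail_map dom_walk_avail;                        // what the DOM walk really knows

  current_avail = &rpo_walk_avail;                 // installed by the RPO walk

  // A DOM elimination walk that forgets to install its own table answers
  // from the stale RPO data:
  std::cout << lookup_leader ("_2") << "\n";       // prints _8 (stale)

  // The pattern the fix uses: save, install the active walk's table, restore.
  const avail_map *saved = current_avail;
  current_avail = &dom_walk_avail;
  std::cout << lookup_leader ("_2") << "\n";       // prints _2 (nothing stale)
  current_avail = saved;
}

In the real patch the role of current_avail is played by rpo_avail, whose
type is changed to eliminate_dom_walker * so that the DOM elimination
walker can be installed there as well.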
Richard Biener 2020-05-08 10:24:37 +02:00
parent 1595a1cb7b
commit 2b42509f8b
5 changed files with 76 additions and 20 deletions

gcc/ChangeLog

@@ -1,3 +1,15 @@
+2020-05-08  Richard Biener  <rguenther@suse.de>
+
+	* tree-ssa-sccvn.c (rpo_avail): Change type to
+	eliminate_dom_walker *.
+	(eliminate_with_rpo_vn): Adjust rpo_avail to make vn_valueize
+	use the DOM walker availability.
+	(vn_reference_fold_indirect): Use get_addr_base_and_unit_offset_1
+	with vn_valueize as valueization callback.
+	(vn_reference_maybe_forwprop_address): Likewise.
+	* tree-dfa.c (get_addr_base_and_unit_offset_1): Also valueize
+	array_ref_low_bound.
+
 2020-05-08  Jakub Jelinek  <jakub@redhat.com>
 
 	PR tree-optimization/94786

gcc/testsuite/ChangeLog

@@ -1,3 +1,7 @@
+2020-05-08  Richard Biener  <rguenther@suse.de>
+
+	* gnat.dg/opt83.adb: New testcase.
+
 2020-05-08  Jakub Jelinek  <jakub@redhat.com>
 
 	PR tree-optimization/94786

gcc/testsuite/gnat.dg/opt83.adb

@@ -0,0 +1,33 @@
-- { dg-do compile }
-- { dg-options "-O2" }

-- rpo fre3 used to loop indefinitely replacing _2 with _8 and back,
-- given MEM[(struct test__e &)_2][0]{lb: _7 sz: 16}._tag = A23s_29;
-- and an earlier _8 = &*_2[0]{lb: _7 sz: 16}.

procedure Opt83 is

   type E is tagged record
      I : Natural := 0;
   end record;

   type A is array (Natural range <>) of aliased E;

   F : E;
   R : access A;

   procedure N is
   begin
      if R = null then
         R := new A (0 .. 4);
      end if;
   end N;

begin
   N;
   R (0) := F;
end Opt83;
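
The comment at the top of the test describes the symptom: elimination kept
replacing _2 with _8 and back.  A toy model of that oscillation, with
made-up tables rather than the real FRE data structures: when two
availability sources disagree about which member of an equivalence class
is the leader, a rewrite loop that keeps consulting a stale source never
reaches a fixed point.

// Toy model only: two availability tables that disagree about the leader
// of the class {_2, _8} make a replacement loop ping-pong forever;
// consulting one current table terminates immediately.
#include <iostream>
#include <map>
#include <string>

int main ()
{
  std::map<std::string, std::string> rpo_avail = { { "_2", "_8" } };  // RPO walk: _8 leads
  std::map<std::string, std::string> dom_avail = { { "_8", "_2" } };  // DOM walk: _2 leads

  // Buggy variant: each step may end up asking a different (stale) table.
  std::string expr = "_2";
  for (int step = 0; step < 6; ++step)
    {
      auto &avail = (step & 1) ? dom_avail : rpo_avail;
      auto it = avail.find (expr);
      if (it == avail.end ())
        break;                      // no replacement available: fixed point
      expr = it->second;            // _2 -> _8 -> _2 -> ...
      std::cout << "step " << step << ": " << expr << "\n";
    }

  // Consistent variant: only the table owned by the active walk is
  // consulted, so the leader choice is stable and rewriting stops.
  expr = "_2";
  auto it = dom_avail.find (expr);
  if (it != dom_avail.end ())
    expr = it->second;
  std::cout << "single availability source: " << expr << "\n";
}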

gcc/tree-dfa.c

@@ -806,23 +806,25 @@ get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
 	    if (valueize
 		&& TREE_CODE (index) == SSA_NAME)
 	      index = (*valueize) (index);
+	    if (!poly_int_tree_p (index))
+	      return NULL_TREE;
+	    low_bound = array_ref_low_bound (exp);
+	    if (valueize
+		&& TREE_CODE (low_bound) == SSA_NAME)
+	      low_bound = (*valueize) (low_bound);
+	    if (!poly_int_tree_p (low_bound))
+	      return NULL_TREE;
+	    unit_size = array_ref_element_size (exp);
+	    if (TREE_CODE (unit_size) != INTEGER_CST)
+	      return NULL_TREE;
 
 	    /* If the resulting bit-offset is constant, track it.  */
-	    if (poly_int_tree_p (index)
-		&& (low_bound = array_ref_low_bound (exp),
-		    poly_int_tree_p (low_bound))
-		&& (unit_size = array_ref_element_size (exp),
-		    TREE_CODE (unit_size) == INTEGER_CST))
-	      {
-		poly_offset_int woffset
-		  = wi::sext (wi::to_poly_offset (index)
-			      - wi::to_poly_offset (low_bound),
-			      TYPE_PRECISION (TREE_TYPE (index)));
-		woffset *= wi::to_offset (unit_size);
-		byte_offset += woffset.force_shwi ();
-	      }
-	    else
-	      return NULL_TREE;
+	    poly_offset_int woffset
+	      = wi::sext (wi::to_poly_offset (index)
+			  - wi::to_poly_offset (low_bound),
+			  TYPE_PRECISION (TREE_TYPE (index)));
+	    woffset *= wi::to_offset (unit_size);
+	    byte_offset += woffset.force_shwi ();
 	  }
 	  break;
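
A rough standalone model of the rewritten ARRAY_REF handling above, using
plain int64_t in place of GCC's poly_int/wide_int types and an invented
lattice for the valueization callback: each operand is valueized first and
the computation gives up as soon as one of them is not constant, so an SSA
low bound like the _7 in the testcase no longer blocks folding once its
value is known.

// Simplified model of the new control flow: valueize index and low bound
// through a callback, bail out early on anything non-constant, then
// accumulate (index - low_bound) * unit_size into the running byte offset.
#include <cstdint>
#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <variant>

using operand = std::variant<int64_t, std::string>;   // constant or SSA name
using valueize_fn = std::optional<int64_t> (*) (const std::string &);

static std::optional<int64_t>
to_constant (const operand &op, valueize_fn valueize)
{
  if (auto *c = std::get_if<int64_t> (&op))
    return *c;
  if (valueize)
    return valueize (std::get<std::string> (op));      // ask the lattice
  return std::nullopt;
}

// Returns false (the analogue of returning NULL_TREE) if no constant
// offset can be computed.
static bool
add_array_ref_offset (const operand &index, const operand &low_bound,
                      int64_t unit_size, valueize_fn valueize,
                      int64_t &byte_offset)
{
  auto i = to_constant (index, valueize);
  if (!i)
    return false;
  auto lb = to_constant (low_bound, valueize);         // new: low bound too
  if (!lb)
    return false;
  byte_offset += (*i - *lb) * unit_size;
  return true;
}

int main ()
{
  // Invented lattice: the SSA low bound _7 is known to be 0, as in the test.
  static const std::map<std::string, int64_t> lattice = { { "_7", 0 } };
  valueize_fn valueize = [] (const std::string &name) -> std::optional<int64_t>
  {
    auto it = lattice.find (name);
    if (it == lattice.end ())
      return std::nullopt;
    return it->second;
  };

  int64_t off = 0;
  // &*_2[0]{lb: _7 sz: 16}: index 0, low bound _7, element size 16 bytes.
  if (add_array_ref_offset (int64_t{0}, std::string ("_7"), 16, valueize, off))
    std::cout << "constant byte offset: " << off << "\n";   // prints 0
}

The real code additionally sign-extends the index difference to the
precision of the index type (the wi::sext call), which this integer-only
model glosses over.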

gcc/tree-ssa-sccvn.c

@@ -1224,8 +1224,8 @@ vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
   /* The only thing we have to do is from &OBJ.foo.bar add the offset
      from .foo.bar to the preceding MEM_REF offset and replace the
      address with &OBJ.  */
-  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
-					     &addr_offset);
+  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
+					       &addr_offset, vn_valueize);
   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
   if (addr_base != TREE_OPERAND (op->op0, 0))
     {
@@ -1282,8 +1282,9 @@ vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
       poly_int64 addr_offset;
 
       addr = gimple_assign_rhs1 (def_stmt);
-      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
-						 &addr_offset);
+      addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
+						   &addr_offset,
+						   vn_valueize);
       /* If that didn't work because the address isn't invariant propagate
 	 the reference tree from the address operation in case the current
 	 dereference isn't offsetted.  */
@@ -2419,7 +2420,7 @@ public:
 };
 
 /* Global RPO state for access from hooks.  */
-static rpo_elim *rpo_avail;
+static eliminate_dom_walker *rpo_avail;
 basic_block vn_context_bb;
 
 /* Return true if BASE1 and BASE2 can be adjusted so they have the
@@ -6559,7 +6560,11 @@ eliminate_with_rpo_vn (bitmap inserted_exprs)
 {
   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
 
+  eliminate_dom_walker *saved_rpo_avail = rpo_avail;
+  rpo_avail = &walker;
   walker.walk (cfun->cfg->x_entry_block_ptr);
+  rpo_avail = saved_rpo_avail;
+
   return walker.eliminate_cleanup ();
 }