fold-const.c (get_pointer_modulus_and_residue): New parameter allow_func_align.

2009-03-28  Martin Jambor  <mjambor@suse.cz>

	* fold-const.c (get_pointer_modulus_and_residue): New parameter
	allow_func_align.
	(fold_binary): Allow function decl alignment consideration if the
	second argument is integer constant one.
	* tree-ssa-forwprop.c (simplify_bitwise_and): New function.
	(tree_ssa_forward_propagate_single_use_vars): Handle assign statements
	with BIT_AND_EXPR on the RHS by calling simplify_bitwise_and.

	* g++.dg/tree-ssa/fwprop-align.C: New test.

From-SVN: r145203
commit 617f389789 (parent fe89fbc56d)
Committed by Martin Jambor on 2009-03-28 19:10:14 +01:00
5 changed files with 99 additions and 5 deletions
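As an illustration of what the fold-const.c part of this change enables, the following standalone C++ sketch (not part of the commit; the function name callee is invented) shows the kind of expression that can now be folded to a constant: masking a function address with the integer constant one, which is known to yield zero whenever the function's DECL_ALIGN_UNIT is at least 2.

#include <cstdint>
#include <cstdio>

static void callee () {}

int
main ()
{
  /* With this patch, GCC may fold the whole mask to 0 at -O2, because the
     address of callee is known to be at least 2-byte aligned, so its low
     bit is zero.  The fold only triggers for the constant mask 1.  */
  std::uintptr_t low_bit = reinterpret_cast<std::uintptr_t> (&callee) & 1;
  std::printf ("%d\n", static_cast<int> (low_bit));
  return 0;
}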

gcc/ChangeLog

@@ -1,3 +1,13 @@
+2009-03-28  Martin Jambor  <mjambor@suse.cz>
+
+	* fold-const.c (get_pointer_modulus_and_residue): New parameter
+	allow_func_align.
+	(fold_binary): Allow function decl alignment consideration if the
+	second argument is integer constant one.
+	* tree-ssa-forwprop.c (simplify_bitwise_and): New function.
+	(tree_ssa_forward_propagate_single_use_vars): Handle assign statements
+	with BIT_AND_EXPR on the RHS by calling simplify_bitwise_and.
+
 2009-03-28  Jan Hubicka  <jh@suse.cz>
 
 	* dwarf2out.c (dwarf2out_begin_prologue): Use crtl->nothrow

gcc/fold-const.c

@@ -9551,10 +9551,15 @@ fold_mult_zconjz (tree type, tree expr)
    0 <= N < M as is common.  In general, the precise value of P is unknown.
    M is chosen as large as possible such that constant N can be determined.
 
-   Returns M and sets *RESIDUE to N.  */
+   Returns M and sets *RESIDUE to N.
+
+   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
+   account.  This is not always possible due to PR 35705.
+*/
 
 static unsigned HOST_WIDE_INT
-get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
+get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
+                                 bool allow_func_align)
 {
   enum tree_code code;
@@ -9584,7 +9589,8 @@ get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
             }
         }
 
-      if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
+      if (DECL_P (expr)
+          && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
         return DECL_ALIGN_UNIT (expr);
     }
   else if (code == POINTER_PLUS_EXPR)
@@ -9595,7 +9601,8 @@ get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
 
       op0 = TREE_OPERAND (expr, 0);
       STRIP_NOPS (op0);
-      modulus = get_pointer_modulus_and_residue (op0, residue);
+      modulus = get_pointer_modulus_and_residue (op0, residue,
+                                                 allow_func_align);
 
       op1 = TREE_OPERAND (expr, 1);
       STRIP_NOPS (op1);
@@ -11235,7 +11242,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
           unsigned HOST_WIDE_INT modulus, residue;
           unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
 
-          modulus = get_pointer_modulus_and_residue (arg0, &residue);
+          modulus = get_pointer_modulus_and_residue (arg0, &residue,
+                                                     integer_onep (arg1));
 
           /* This works because modulus is a power of 2.  If this weren't the
              case, we'd have to replace it by its greatest power-of-2

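The folding above relies on the modulus/residue arithmetic documented before get_pointer_modulus_and_residue: if a pointer value is known to equal modulus * k + residue with modulus a power of two, then for any mask that fits entirely below the modulus, ptr & mask equals residue & mask. A small standalone C++ check of that identity (illustrative only, with arbitrarily chosen values; not GCC code):

#include <cassert>
#include <cstdio>

int
main ()
{
  const unsigned long modulus = 8;  // a power of two, as required
  const unsigned long residue = 4;  // the known value of ptr % modulus
  const unsigned long mask = 1;     // satisfies (mask & (modulus - 1)) == mask

  for (unsigned long k = 0; k < 64; ++k)
    {
      unsigned long ptr = modulus * k + residue;
      // modulus * k contributes nothing below the modulus bit, so the
      // masked value depends only on the residue.
      assert ((ptr & mask) == (residue & mask));
    }
  std::printf ("ptr & %lu is always %lu\n", mask, residue & mask);
  return 0;
}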
gcc/testsuite/ChangeLog

@@ -1,3 +1,7 @@
+2009-03-28  Martin Jambor  <mjambor@suse.cz>
+
+	* g++.dg/tree-ssa/fwprop-align.C: New test.
+
 2009-03-28  Jakub Jelinek  <jakub@redhat.com>
 
 	* gcc.target/powerpc/altivec-28.c: New test.

gcc/testsuite/g++.dg/tree-ssa/fwprop-align.C

@@ -0,0 +1,20 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-forwprop2" } */
+
+struct A
+{
+  void foo ()
+  {
+  }
+};
+
+int main()
+{
+  void (A::* const p)() = &A::foo;
+  A a;
+  (a.*p)();
+}
+
+/* We should eliminate the check whether p points to a virtual function.  */
+/* { dg-final { scan-tree-dump-times "& 1" 0 "forwprop2" } } */
+/* { dg-final { cleanup-tree-dump "forwprop2" } } */

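Why the test scans for "& 1": under the Itanium C++ ABI (assumed here; the ARM variant encodes this differently), a pointer to member function stores either the function's address or one plus a vtable offset in its first word, so a call through it tests the low bit to decide whether the member is virtual. Since A::foo is non-virtual and function decls are at least 2-byte aligned, the patched forwprop pass can fold that test away, leaving no "& 1" in the dump. A hedged standalone sketch of that representation (not part of the commit):

#include <cstdint>
#include <cstdio>
#include <cstring>

struct A
{
  void foo () {}
  virtual void bar () {}
};

/* Inspect the first word of a pointer-to-member-function; under the generic
   Itanium C++ ABI an odd value marks a virtual member.  */
template <typename PMF>
static bool
looks_virtual (PMF pmf)
{
  std::uintptr_t first_word;
  std::memcpy (&first_word, &pmf, sizeof first_word);
  return (first_word & 1) != 0;
}

int
main ()
{
  std::printf ("A::foo looks virtual: %d\n", static_cast<int> (looks_virtual (&A::foo)));
  std::printf ("A::bar looks virtual: %d\n", static_cast<int> (looks_virtual (&A::bar)));
  return 0;
}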
gcc/tree-ssa-forwprop.c

@@ -147,6 +147,14 @@ along with GCC; see the file COPYING3.  If not see
      ptr2 = &x[index];
 
+   Or
+     ssa = (int) decl
+     res = ssa & 1
+
+   Provided that decl has known alignment >= 2, will get turned into
+
+     res = 0
+
    We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
    allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
    {NOT_EXPR,NEG_EXPR}.
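A source-level illustration of the pattern this comment describes (a standalone C++ sketch, not GCC code; the names ssa, res and decl mirror the comment, and the explicit alignas just makes the known alignment >= 2 obvious):

#include <cstdint>
#include <cstdio>

alignas (4) static int decl;  // known alignment >= 2, so the low address bit is 0

int
main ()
{
  std::uintptr_t ssa = reinterpret_cast<std::uintptr_t> (&decl);  // ssa = (int) decl
  std::uintptr_t res = ssa & 1;                                   // folds to res = 0
  std::printf ("%lu\n", static_cast<unsigned long> (res));
  return 0;
}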
@@ -1124,6 +1132,45 @@ simplify_gimple_switch (gimple stmt)
         }
     }
 
+/* Run bitwise and assignments through the folder.  If the first argument is an
+   ssa name that is itself a result of a typecast of an ADDR_EXPR to an
+   integer, feed the ADDR_EXPR to the folder rather than the ssa name.
+*/
+
+static void
+simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
+{
+  tree res;
+  tree arg1 = gimple_assign_rhs1 (stmt);
+  tree arg2 = gimple_assign_rhs2 (stmt);
+
+  if (TREE_CODE (arg2) != INTEGER_CST)
+    return;
+
+  if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
+    {
+      gimple def = SSA_NAME_DEF_STMT (arg1);
+
+      if (gimple_assign_cast_p (def)
+          && INTEGRAL_TYPE_P (gimple_expr_type (def)))
+        {
+          tree op = gimple_assign_rhs1 (def);
+
+          if (TREE_CODE (op) == ADDR_EXPR)
+            arg1 = op;
+        }
+    }
+
+  res = fold_binary (BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
+                     arg1, arg2);
+  if (res && is_gimple_min_invariant (res))
+    {
+      gimple_assign_set_rhs_from_tree (gsi, res);
+      update_stmt (stmt);
+    }
+  return;
+}
+
 /* Main entry point for the forward propagation optimizer.  */
 
 static unsigned int
@@ -1206,6 +1253,11 @@ tree_ssa_forward_propagate_single_use_vars (void)
                    else
                      gsi_next (&gsi);
                  }
+               else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
+                 {
+                   simplify_bitwise_and (&gsi, stmt);
+                   gsi_next (&gsi);
+                 }
                else
                  gsi_next (&gsi);
              }