match.pd: Implement bitwise binary and unary simplifications from tree-ssa-forwprop.c.
2014-11-06  Richard Biener  <rguenther@suse.de>

	* match.pd: Implement bitwise binary and unary simplifications
	from tree-ssa-forwprop.c.
	* fold-const.c (fold_unary_loc): Remove them here.
	(fold_binary_loc): Likewise.
	* tree-ssa-forwprop.c (simplify_not_neg_expr): Remove.
	(truth_valued_ssa_name): Likewise.
	(lookup_logical_inverted_value): Likewise.
	(simplify_bitwise_binary_1): Likewise.
	(hoist_conversion_for_bitop_p): Likewise.
	(simplify_bitwise_binary_boolean): Likewise.
	(simplify_bitwise_binary): Likewise.
	(pass_forwprop::execute): Remove calls to simplify_not_neg_expr
	and simplify_bitwise_binary.
	* genmatch.c (dt_node::append_true_op): Use safe_as_a for parent.
	(decision_tree::insert): Also insert non-expressions.

	* gcc.dg/tree-ssa/forwprop-28.c: Adjust scanning for the
	desired transform.

From-SVN: r217178
parent 45ea41fe8a
commit 5609420fba
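The ChangeLog above summarizes the change: a set of bitwise folds moves from ad-hoc C code in fold-const.c and tree-ssa-forwprop.c into declarative match.pd patterns. As an illustration only (not part of the commit; the function names are invented for this sketch), these are C-level shapes that the relocated rules cover:

/* Illustrative sketch; names are invented, not from the commit.  */
int ex_not_not (int x)           { return ~~x; }           /* ~~x -> x */
int ex_absorb (int x, int y)     { return (x | y) & x; }   /* (x | y) & x -> x */
int ex_andnot (int x, int y)     { return (~x | y) & x; }  /* (~x | y) & x -> x & y */
_Bool ex_contradiction (_Bool x) { return x & !x; }        /* X & !X -> 0 */
_Bool ex_tautology (_Bool x)     { return x | !x; }        /* X | !X -> 1 for truth values */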
gcc/ChangeLog
@@ -1,3 +1,21 @@
+2014-11-06  Richard Biener  <rguenther@suse.de>
+
+	* match.pd: Implement bitwise binary and unary simplifications
+	from tree-ssa-forwprop.c.
+	* fold-const.c (fold_unary_loc): Remove them here.
+	(fold_binary_loc): Likewise.
+	* tree-ssa-forwprop.c (simplify_not_neg_expr): Remove.
+	(truth_valued_ssa_name): Likewise.
+	(lookup_logical_inverted_value): Likewise.
+	(simplify_bitwise_binary_1): Likewise.
+	(hoist_conversion_for_bitop_p): Likewise.
+	(simplify_bitwise_binary_boolean): Likewise.
+	(simplify_bitwise_binary): Likewise.
+	(pass_forwprop::execute): Remove calls to simplify_not_neg_expr
+	and simplify_bitwise_binary.
+	* genmatch.c (dt_node::append_true_op): Use safe_as_a for parent.
+	(decision_tree::insert): Also insert non-expressions.
+
 2014-11-06  Hale Wang  <Hale.Wang@arm.com>
 
 	* config/arm/arm-cores.def: Add support for
gcc/fold-const.c
@@ -8008,8 +8008,6 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
     case BIT_NOT_EXPR:
       if (TREE_CODE (arg0) == INTEGER_CST)
         return fold_not_const (arg0, type);
-      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
-        return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
       /* Convert ~ (-A) to A - 1.  */
       else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
         return fold_build2_loc (loc, MINUS_EXPR, type,
@@ -11152,26 +11150,6 @@ fold_binary_loc (location_t loc,
                                     arg1);
         }
 
-      /* (X & Y) | Y is (X, Y).  */
-      if (TREE_CODE (arg0) == BIT_AND_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
-        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
-      /* (X & Y) | X is (Y, X).  */
-      if (TREE_CODE (arg0) == BIT_AND_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
-          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
-        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
-      /* X | (X & Y) is (Y, X).  */
-      if (TREE_CODE (arg1) == BIT_AND_EXPR
-          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
-          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
-        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
-      /* X | (Y & X) is (Y, X).  */
-      if (TREE_CODE (arg1) == BIT_AND_EXPR
-          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
-          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
-        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
-
       /* (X & ~Y) | (~X & Y) is X ^ Y */
       if (TREE_CODE (arg0) == BIT_AND_EXPR
           && TREE_CODE (arg1) == BIT_AND_EXPR)
@@ -11391,42 +11369,6 @@ fold_binary_loc (location_t loc,
           && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
         return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
 
-      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
-      if (TREE_CODE (arg0) == BIT_IOR_EXPR
-          && TREE_CODE (arg1) == INTEGER_CST
-          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
-        {
-          tree tmp1 = fold_convert_loc (loc, type, arg1);
-          tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
-          tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
-          tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
-          tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
-          return
-            fold_convert_loc (loc, type,
-                              fold_build2_loc (loc, BIT_IOR_EXPR,
-                                               type, tmp2, tmp3));
-        }
-
-      /* (X | Y) & Y is (X, Y).  */
-      if (TREE_CODE (arg0) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
-        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
-      /* (X | Y) & X is (Y, X).  */
-      if (TREE_CODE (arg0) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
-          && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
-        return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
-      /* X & (X | Y) is (Y, X).  */
-      if (TREE_CODE (arg1) == BIT_IOR_EXPR
-          && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
-          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
-        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
-      /* X & (Y | X) is (Y, X).  */
-      if (TREE_CODE (arg1) == BIT_IOR_EXPR
-          && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
-          && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
-        return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
-
       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
       if (TREE_CODE (arg0) == BIT_XOR_EXPR
           && INTEGRAL_TYPE_P (type)
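The "(X & Y) | Y is (X, Y)" comments in the block removed above describe omit_one_operand folds: the value reduces to one operand while the other operand is still evaluated for its side effects, hence the comma expression. A minimal C sketch of that behaviour, with invented names:

/* Invented names; shows why the fold keeps a comma expression.  */
extern int f (void);            /* assume f () may have side effects */

int keeps_call (int y)
{
  return (f () & y) | y;        /* value is y, but f () is still called:
                                   conceptually (f (), y)  */
}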
gcc/genmatch.c
@@ -1126,7 +1126,7 @@ dt_node::append_op (operand *op, dt_node *parent, unsigned pos)
 dt_node *
 dt_node::append_true_op (dt_node *parent, unsigned pos)
 {
-  dt_operand *parent_ = as_a<dt_operand *> (parent);
+  dt_operand *parent_ = safe_as_a<dt_operand *> (parent);
   dt_operand *n = new dt_operand (DT_TRUE, 0, 0, parent_, pos);
   return append_node (n);
 }
@@ -1232,9 +1232,6 @@ at_assert_elm:
 void
 decision_tree::insert (struct simplify *s, unsigned pattern_no)
 {
-  if (s->match->type != operand::OP_EXPR)
-    return;
-
   dt_operand **indexes = XCNEWVEC (dt_operand *, s->capture_max + 1);
   dt_node *p = decision_tree::insert_operand (root, s->match, indexes);
   p->append_simplify (s, pattern_no, indexes);
gcc/match.pd (+128 lines)
@@ -113,6 +113,134 @@ along with GCC; see the file COPYING3.  If not see
   @0)
 
+
+/* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
+   when profitable.
+   For bitwise binary operations apply operand conversions to the
+   binary operation result instead of to the operands.  This allows
+   to combine successive conversions and bitwise binary operations.
+   We combine the above two cases by using a conditional convert.  */
+(for bitop (bit_and bit_ior bit_xor)
+ (simplify
+  (bitop (convert @0) (convert? @1))
+  (if (((TREE_CODE (@1) == INTEGER_CST
+	 && INTEGRAL_TYPE_P (TREE_TYPE (@0))
+	 && int_fits_type_p (@1, TREE_TYPE (@0))
+	 /* ???  This transform conflicts with fold-const.c doing
+	    Convert (T)(x & c) into (T)x & (T)c, if c is an integer
+	    constants (if x has signed type, the sign bit cannot be set
+	    in c).  This folds extension into the BIT_AND_EXPR.
+	    Restrict it to GIMPLE to avoid endless recursions.  */
+	 && (bitop != BIT_AND_EXPR || GIMPLE))
+	|| types_compatible_p (TREE_TYPE (@0), TREE_TYPE (@1)))
+       && (/* That's a good idea if the conversion widens the operand, thus
+	      after hoisting the conversion the operation will be narrower.  */
+	   TYPE_PRECISION (TREE_TYPE (@0)) < TYPE_PRECISION (type)
+	   /* It's also a good idea if the conversion is to a non-integer
+	      mode.  */
+	   || GET_MODE_CLASS (TYPE_MODE (type)) != MODE_INT
+	   /* Or if the precision of TO is not the same as the precision
+	      of its mode.  */
+	   || TYPE_PRECISION (type) != GET_MODE_PRECISION (TYPE_MODE (type))))
+   (convert (bitop @0 (convert @1))))))
+
+/* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B.  */
+(for bitop (bit_and bit_ior bit_xor)
+ (simplify
+  (bitop (bit_and:c @0 @1) (bit_and @2 @1))
+  (bit_and (bitop @0 @2) @1)))
+
+/* (x | CST1) & CST2 -> (x & CST2) | (CST1 & CST2) */
+(simplify
+ (bit_and (bit_ior @0 CONSTANT_CLASS_P@1) CONSTANT_CLASS_P@2)
+ (bit_ior (bit_and @0 @2) (bit_and @1 @2)))
+
+/* Combine successive equal operations with constants.  */
+(for bitop (bit_and bit_ior bit_xor)
+ (simplify
+  (bitop (bitop @0 CONSTANT_CLASS_P@1) CONSTANT_CLASS_P@2)
+  (bitop @0 (bitop @1 @2))))
+
+/* Try simple folding for X op !X, and X op X with the help
+   of the truth_valued_p and logical_inverted_value predicates.  */
+(match truth_valued_p
+ @0
+ (if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
+(for op (lt le eq ne ge gt truth_and truth_andif truth_or truth_orif truth_xor)
+ (match truth_valued_p
+  (op @0 @1)))
+(match truth_valued_p
+ (truth_not @0))
+
+(match (logical_inverted_value @0)
+ (bit_not truth_valued_p@0))
+(match (logical_inverted_value @0)
+ (eq @0 integer_zerop)
+ (if (INTEGRAL_TYPE_P (TREE_TYPE (@0)))))
+(match (logical_inverted_value @0)
+ (ne truth_valued_p@0 integer_onep)
+ (if (INTEGRAL_TYPE_P (TREE_TYPE (@0)))))
+(match (logical_inverted_value @0)
+ (bit_xor truth_valued_p@0 integer_onep))
+
+/* X & !X -> 0.  */
+(simplify
+ (bit_and:c @0 (logical_inverted_value @0))
+ { build_zero_cst (type); })
+/* X | !X and X ^ !X -> 1, , if X is truth-valued.  */
+(for op (bit_ior bit_xor)
+ (simplify
+  (op:c truth_valued_p@0 (logical_inverted_value @0))
+  { build_one_cst (type); }))
+
+(for bitop (bit_and bit_ior)
+     rbitop (bit_ior bit_and)
+ /* (x | y) & x -> x */
+ /* (x & y) | x -> x */
+ (simplify
+  (bitop:c (rbitop:c @0 @1) @0)
+  @0)
+ /* (~x | y) & x -> x & y */
+ /* (~x & y) | x -> x | y */
+ (simplify
+  (bitop:c (rbitop:c (bit_not @0) @1) @0)
+  (bitop @0 @1)))
+
+/* If arg1 and arg2 are booleans (or any single bit type)
+   then try to simplify:
+
+   (~X & Y) -> X < Y
+   (X & ~Y) -> Y < X
+   (~X | Y) -> X <= Y
+   (X | ~Y) -> Y <= X
+
+   But only do this if our result feeds into a comparison as
+   this transformation is not always a win, particularly on
+   targets with and-not instructions.
+   -> simplify_bitwise_binary_boolean */
+(simplify
+ (ne (bit_and:c (bit_not @0) @1) integer_zerop)
+ (if (INTEGRAL_TYPE_P (TREE_TYPE (@1))
+      && TYPE_PRECISION (TREE_TYPE (@1)) == 1)
+  (lt @0 @1)))
+(simplify
+ (ne (bit_ior:c (bit_not @0) @1) integer_zerop)
+ (if (INTEGRAL_TYPE_P (TREE_TYPE (@1))
+      && TYPE_PRECISION (TREE_TYPE (@1)) == 1)
+  (le @0 @1)))
+
+/* From tree-ssa-forwprop.c:simplify_not_neg_expr.  */
+
+/* ~~x -> x */
+(simplify
+ (bit_not (bit_not @0))
+ @0)
+
+/* The corresponding (negate (negate @0)) -> @0 is in match-plusminus.pd.  */
+(simplify
+ (negate (negate @0))
+ @0)
+
 
 /* Simplifications of conversions.  */
 
 /* Basic strip-useless-type-conversions / strip_nops.  */
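As a sketch of the boolean patterns just added (not part of the commit; names invented): on one-bit types, (~X & Y) != 0 holds exactly when X is 0 and Y is 1, which is the comparison X < Y.

#include <stdbool.h>

/* Invented names; both functions compute the same predicate on bools.  */
bool before (bool x, bool y) { return (!x & y) != 0; }  /* (~X & Y) != 0 */
bool after  (bool x, bool y) { return x < y; }          /* X < Y: only false < true */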
gcc/testsuite/ChangeLog
@@ -1,3 +1,8 @@
+2014-11-06  Richard Biener  <rguenther@suse.de>
+
+	* gcc.dg/tree-ssa/forwprop-28.c: Adjust scanning for the
+	desired transform.
+
 2014-11-05  Matthew Fortune  <matthew.fortune@imgtec.com>
 
 	* gcc.target/mips/asm-1.c (bar): Add prototype.
gcc/testsuite/gcc.dg/tree-ssa/forwprop-28.c
@@ -1,7 +1,7 @@
 /* Setting LOGICAL_OP_NON_SHORT_CIRCUIT to 0 leads to two conditional jumps
    when evaluating an && condition.  VRP is not able to optimize this.  */
 /* { dg-do compile { target { ! { logical_op_short_circuit || { m68k*-*-* mmix*-*-* mep*-*-* bfin*-*-* v850*-*-* moxie*-*-* cris*-*-* m32c*-*-* fr30*-*-* mcore*-*-* powerpc*-*-* xtensa*-*-* hppa*-*-* } } } } } */
-/* { dg-options "-O2 -fdump-tree-forwprop1" } */
+/* { dg-options "-O2 -fdump-tree-forwprop1-details" } */
 
 extern char *frob (void);
 extern _Bool testit (void);
@@ -79,6 +79,6 @@ test_8 (int code)
     oof ();
 }
 
-/* { dg-final { scan-tree-dump-times "Replaced" 8 "forwprop1"} } */
+/* { dg-final { scan-tree-dump-times "simplified to if \\\(\[^ ]* <" 8 "forwprop1"} } */
 /* { dg-final { cleanup-tree-dump "forwprop1" } } */
 
gcc/tree-ssa-forwprop.c
@@ -1253,49 +1253,6 @@ simplify_conversion_from_bitmask (gimple_stmt_iterator *gsi_p)
 }
 
 
-/* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
-   If so, we can change STMT into lhs = y which can later be copy
-   propagated.  Similarly for negation.
-
-   This could trivially be formulated as a forward propagation
-   to immediate uses.  However, we already had an implementation
-   from DOM which used backward propagation via the use-def links.
-
-   It turns out that backward propagation is actually faster as
-   there's less work to do for each NOT/NEG expression we find.
-   Backwards propagation needs to look at the statement in a single
-   backlink.  Forward propagation needs to look at potentially more
-   than one forward link.
-
-   Returns true when the statement was changed.  */
-
-static bool
-simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
-{
-  gimple stmt = gsi_stmt (*gsi_p);
-  tree rhs = gimple_assign_rhs1 (stmt);
-  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
-
-  /* See if the RHS_DEF_STMT has the same form as our statement.  */
-  if (is_gimple_assign (rhs_def_stmt)
-      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
-    {
-      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
-
-      /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
-      if (TREE_CODE (rhs_def_operand) == SSA_NAME
-          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
-        {
-          gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
-          stmt = gsi_stmt (*gsi_p);
-          update_stmt (stmt);
-          return true;
-        }
-    }
-
-  return false;
-}
-
 /* Helper function for simplify_gimple_switch.  Remove case labels that
    have values outside the range of the new type.  */
 
@@ -1714,126 +1671,6 @@ simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
   return false;
 }
 
-/* Checks if expression has type of one-bit precision, or is a known
-   truth-valued expression.  */
-static bool
-truth_valued_ssa_name (tree name)
-{
-  gimple def;
-  tree type = TREE_TYPE (name);
-
-  if (!INTEGRAL_TYPE_P (type))
-    return false;
-  /* Don't check here for BOOLEAN_TYPE as the precision isn't
-     necessarily one and so ~X is not equal to !X.  */
-  if (TYPE_PRECISION (type) == 1)
-    return true;
-  def = SSA_NAME_DEF_STMT (name);
-  if (is_gimple_assign (def))
-    return truth_value_p (gimple_assign_rhs_code (def));
-  return false;
-}
-
-/* Helper routine for simplify_bitwise_binary_1 function.
-   Return for the SSA name NAME the expression X if it mets condition
-   NAME = !X. Otherwise return NULL_TREE.
-   Detected patterns for NAME = !X are:
-     !X and X == 0 for X with integral type.
-     X ^ 1, X != 1,or ~X for X with integral type with precision of one.  */
-static tree
-lookup_logical_inverted_value (tree name)
-{
-  tree op1, op2;
-  enum tree_code code;
-  gimple def;
-
-  /* If name has none-intergal type, or isn't a SSA_NAME, then
-     return.  */
-  if (TREE_CODE (name) != SSA_NAME
-      || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
-    return NULL_TREE;
-  def = SSA_NAME_DEF_STMT (name);
-  if (!is_gimple_assign (def))
-    return NULL_TREE;
-
-  code = gimple_assign_rhs_code (def);
-  op1 = gimple_assign_rhs1 (def);
-  op2 = NULL_TREE;
-
-  /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
-     If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return.  */
-  if (code == EQ_EXPR || code == NE_EXPR
-      || code == BIT_XOR_EXPR)
-    op2 = gimple_assign_rhs2 (def);
-
-  switch (code)
-    {
-    case BIT_NOT_EXPR:
-      if (truth_valued_ssa_name (name))
-        return op1;
-      break;
-    case EQ_EXPR:
-      /* Check if we have X == 0 and X has an integral type.  */
-      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
-        break;
-      if (integer_zerop (op2))
-        return op1;
-      break;
-    case NE_EXPR:
-      /* Check if we have X != 1 and X is a truth-valued.  */
-      if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
-        break;
-      if (integer_onep (op2) && truth_valued_ssa_name (op1))
-        return op1;
-      break;
-    case BIT_XOR_EXPR:
-      /* Check if we have X ^ 1 and X is truth valued.  */
-      if (integer_onep (op2) && truth_valued_ssa_name (op1))
-        return op1;
-      break;
-    default:
-      break;
-    }
-
-  return NULL_TREE;
-}
-
-/* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
-   operations CODE, if one operand has the logically inverted
-   value of the other.  */
-static tree
-simplify_bitwise_binary_1 (enum tree_code code, tree type,
-                           tree arg1, tree arg2)
-{
-  tree anot;
-
-  /* If CODE isn't a bitwise binary operation, return NULL_TREE.  */
-  if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
-      && code != BIT_XOR_EXPR)
-    return NULL_TREE;
-
-  /* First check if operands ARG1 and ARG2 are equal.  If so
-     return NULL_TREE as this optimization is handled fold_stmt.  */
-  if (arg1 == arg2)
-    return NULL_TREE;
-  /* See if we have in arguments logical-not patterns.  */
-  if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
-       || anot != arg2)
-      && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
-          || anot != arg1))
-    return NULL_TREE;
-
-  /* X & !X -> 0.  */
-  if (code == BIT_AND_EXPR)
-    return fold_convert (type, integer_zero_node);
-  /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued.  */
-  if (truth_valued_ssa_name (anot))
-    return fold_convert (type, integer_one_node);
-
-  /* ??? Otherwise result is (X != 0 ? X : 1).  not handled.  */
-  return NULL_TREE;
-}
-
 /* Given a ssa_name in NAME see if it was defined by an assignment and
    set CODE to be the code and ARG1 to the first operand on the rhs and ARG2
    to the second operand on the rhs.  */
@@ -1879,353 +1716,6 @@ defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
   /* Ignore arg3 currently.  */
 }
 
-/* Return true if a conversion of an operand from type FROM to type TO
-   should be applied after performing the operation instead.  */
-
-static bool
-hoist_conversion_for_bitop_p (tree to, tree from)
-{
-  /* That's a good idea if the conversion widens the operand, thus
-     after hoisting the conversion the operation will be narrower.  */
-  if (TYPE_PRECISION (from) < TYPE_PRECISION (to))
-    return true;
-
-  /* It's also a good idea if the conversion is to a non-integer mode.  */
-  if (GET_MODE_CLASS (TYPE_MODE (to)) != MODE_INT)
-    return true;
-
-  /* Or if the precision of TO is not the same as the precision
-     of its mode.  */
-  if (TYPE_PRECISION (to) != GET_MODE_PRECISION (TYPE_MODE (to)))
-    return true;
-
-  return false;
-}
-
-/* GSI points to a statement of the form
-
-   result = OP0 CODE OP1
-
-   Where OP0 and OP1 are single bit SSA_NAMEs and CODE is either
-   BIT_AND_EXPR or BIT_IOR_EXPR.
-
-   If OP0 is fed by a bitwise negation of another single bit SSA_NAME,
-   then we can simplify the two statements into a single LT_EXPR or LE_EXPR
-   when code is BIT_AND_EXPR and BIT_IOR_EXPR respectively.
-
-   If a simplification is made, return TRUE, else return FALSE.  */
-static bool
-simplify_bitwise_binary_boolean (gimple_stmt_iterator *gsi,
-                                 enum tree_code code,
-                                 tree op0, tree op1)
-{
-  gimple op0_def_stmt = SSA_NAME_DEF_STMT (op0);
-
-  if (!is_gimple_assign (op0_def_stmt)
-      || (gimple_assign_rhs_code (op0_def_stmt) != BIT_NOT_EXPR))
-    return false;
-
-  tree x = gimple_assign_rhs1 (op0_def_stmt);
-  if (TREE_CODE (x) == SSA_NAME
-      && INTEGRAL_TYPE_P (TREE_TYPE (x))
-      && TYPE_PRECISION (TREE_TYPE (x)) == 1
-      && TYPE_UNSIGNED (TREE_TYPE (x)) == TYPE_UNSIGNED (TREE_TYPE (op1)))
-    {
-      enum tree_code newcode;
-
-      gimple stmt = gsi_stmt (*gsi);
-      gimple_assign_set_rhs1 (stmt, x);
-      gimple_assign_set_rhs2 (stmt, op1);
-      if (code == BIT_AND_EXPR)
-        newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LT_EXPR : GT_EXPR;
-      else
-        newcode = TYPE_UNSIGNED (TREE_TYPE (x)) ? LE_EXPR : GE_EXPR;
-      gimple_assign_set_rhs_code (stmt, newcode);
-      update_stmt (stmt);
-      return true;
-    }
-  return false;
-
-}
-
-/* Simplify bitwise binary operations.
-   Return true if a transformation applied, otherwise return false.  */
-
-static bool
-simplify_bitwise_binary (gimple_stmt_iterator *gsi)
-{
-  gimple stmt = gsi_stmt (*gsi);
-  tree arg1 = gimple_assign_rhs1 (stmt);
-  tree arg2 = gimple_assign_rhs2 (stmt);
-  enum tree_code code = gimple_assign_rhs_code (stmt);
-  tree res;
-  tree def1_arg1, def1_arg2, def2_arg1, def2_arg2;
-  enum tree_code def1_code, def2_code;
-
-  defcodefor_name (arg1, &def1_code, &def1_arg1, &def1_arg2);
-  defcodefor_name (arg2, &def2_code, &def2_arg1, &def2_arg2);
-
-  /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST))
-     when profitable.  */
-  if (TREE_CODE (arg2) == INTEGER_CST
-      && CONVERT_EXPR_CODE_P (def1_code)
-      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1))
-      && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
-      && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
-    {
-      gimple newop;
-      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
-      newop =
-        gimple_build_assign_with_ops (code, tem, def1_arg1,
-                                      fold_convert_loc (gimple_location (stmt),
-                                                        TREE_TYPE (def1_arg1),
-                                                        arg2));
-      gimple_set_location (newop, gimple_location (stmt));
-      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
-      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
-                                        tem, NULL_TREE, NULL_TREE);
-      update_stmt (gsi_stmt (*gsi));
-      return true;
-    }
-
-  /* For bitwise binary operations apply operand conversions to the
-     binary operation result instead of to the operands.  This allows
-     to combine successive conversions and bitwise binary operations.  */
-  if (CONVERT_EXPR_CODE_P (def1_code)
-      && CONVERT_EXPR_CODE_P (def2_code)
-      && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
-      && hoist_conversion_for_bitop_p (TREE_TYPE (arg1), TREE_TYPE (def1_arg1)))
-    {
-      gimple newop;
-      tree tem = make_ssa_name (TREE_TYPE (def1_arg1), NULL);
-      newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
-      gimple_set_location (newop, gimple_location (stmt));
-      gsi_insert_before (gsi, newop, GSI_SAME_STMT);
-      gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
-                                        tem, NULL_TREE, NULL_TREE);
-      update_stmt (gsi_stmt (*gsi));
-      return true;
-    }
-
-
-  /* Simplify (A & B) OP0 (C & B) to (A OP0 C) & B.  */
-  if (def1_code == def2_code
-      && def1_code == BIT_AND_EXPR
-      && operand_equal_for_phi_arg_p (def1_arg2,
-                                      def2_arg2))
-    {
-      tree b = def1_arg2;
-      tree a = def1_arg1;
-      tree c = def2_arg1;
-      tree inner = fold_build2 (code, TREE_TYPE (arg2), a, c);
-      /* If A OP0 C (this usually means C is the same as A) is 0
-         then fold it down correctly.  */
-      if (integer_zerop (inner))
-        {
-          gimple_assign_set_rhs_from_tree (gsi, inner);
-          update_stmt (stmt);
-          return true;
-        }
-      /* If A OP0 C (this usually means C is the same as A) is a ssa_name
-         then fold it down correctly.  */
-      else if (TREE_CODE (inner) == SSA_NAME)
-        {
-          tree outer = fold_build2 (def1_code, TREE_TYPE (inner),
-                                    inner, b);
-          gimple_assign_set_rhs_from_tree (gsi, outer);
-          update_stmt (stmt);
-          return true;
-        }
-      else
-        {
-          gimple newop;
-          tree tem;
-          tem = make_ssa_name (TREE_TYPE (arg2), NULL);
-          newop = gimple_build_assign_with_ops (code, tem, a, c);
-          gimple_set_location (newop, gimple_location (stmt));
-          /* Make sure to re-process the new stmt as it's walking upwards.  */
-          gsi_insert_before (gsi, newop, GSI_NEW_STMT);
-          gimple_assign_set_rhs1 (stmt, tem);
-          gimple_assign_set_rhs2 (stmt, b);
-          gimple_assign_set_rhs_code (stmt, def1_code);
-          update_stmt (stmt);
-          return true;
-        }
-    }
-
-  /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2).  */
-  if (code == BIT_AND_EXPR
-      && def1_code == BIT_IOR_EXPR
-      && CONSTANT_CLASS_P (arg2)
-      && CONSTANT_CLASS_P (def1_arg2))
-    {
-      tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
-                              arg2, def1_arg2);
-      tree tem;
-      gimple newop;
-      if (integer_zerop (cst))
-        {
-          gimple_assign_set_rhs1 (stmt, def1_arg1);
-          update_stmt (stmt);
-          return true;
-        }
-      tem = make_ssa_name (TREE_TYPE (arg2), NULL);
-      newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
-                                            tem, def1_arg1, arg2);
-      gimple_set_location (newop, gimple_location (stmt));
-      /* Make sure to re-process the new stmt as it's walking upwards.  */
-      gsi_insert_before (gsi, newop, GSI_NEW_STMT);
-      gimple_assign_set_rhs1 (stmt, tem);
-      gimple_assign_set_rhs2 (stmt, cst);
-      gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
-      update_stmt (stmt);
-      return true;
-    }
-
-  /* Combine successive equal operations with constants.  */
-  if ((code == BIT_AND_EXPR
-       || code == BIT_IOR_EXPR
-       || code == BIT_XOR_EXPR)
-      && def1_code == code
-      && CONSTANT_CLASS_P (arg2)
-      && CONSTANT_CLASS_P (def1_arg2))
-    {
-      tree cst = fold_build2 (code, TREE_TYPE (arg2),
-                              arg2, def1_arg2);
-      gimple_assign_set_rhs1 (stmt, def1_arg1);
-      gimple_assign_set_rhs2 (stmt, cst);
-      update_stmt (stmt);
-      return true;
-    }
-
-  /* Try simple folding for X op !X, and X op X.  */
-  res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
-  if (res != NULL_TREE)
-    {
-      gimple_assign_set_rhs_from_tree (gsi, res);
-      update_stmt (gsi_stmt (*gsi));
-      return true;
-    }
-
-  if (code == BIT_AND_EXPR || code == BIT_IOR_EXPR)
-    {
-      enum tree_code ocode = code == BIT_AND_EXPR ? BIT_IOR_EXPR : BIT_AND_EXPR;
-      if (def1_code == ocode)
-        {
-          tree x = arg2;
-          enum tree_code coden;
-          tree a1, a2;
-          /* ( X | Y) & X -> X */
-          /* ( X & Y) | X -> X */
-          if (x == def1_arg1
-              || x == def1_arg2)
-            {
-              gimple_assign_set_rhs_from_tree (gsi, x);
-              update_stmt (gsi_stmt (*gsi));
-              return true;
-            }
-
-          defcodefor_name (def1_arg1, &coden, &a1, &a2);
-          /* (~X | Y) & X -> X & Y */
-          /* (~X & Y) | X -> X | Y */
-          if (coden == BIT_NOT_EXPR && a1 == x)
-            {
-              gimple_assign_set_rhs_with_ops (gsi, code,
-                                              x, def1_arg2);
-              gcc_assert (gsi_stmt (*gsi) == stmt);
-              update_stmt (stmt);
-              return true;
-            }
-          defcodefor_name (def1_arg2, &coden, &a1, &a2);
-          /* (Y | ~X) & X -> X & Y */
-          /* (Y & ~X) | X -> X | Y */
-          if (coden == BIT_NOT_EXPR && a1 == x)
-            {
-              gimple_assign_set_rhs_with_ops (gsi, code,
-                                              x, def1_arg1);
-              gcc_assert (gsi_stmt (*gsi) == stmt);
-              update_stmt (stmt);
-              return true;
-            }
-        }
-      if (def2_code == ocode)
-        {
-          enum tree_code coden;
-          tree a1;
-          tree x = arg1;
-          /* X & ( X | Y) -> X */
-          /* X | ( X & Y) -> X */
-          if (x == def2_arg1
-              || x == def2_arg2)
-            {
-              gimple_assign_set_rhs_from_tree (gsi, x);
-              update_stmt (gsi_stmt (*gsi));
-              return true;
-            }
-          defcodefor_name (def2_arg1, &coden, &a1, NULL);
-          /* (~X | Y) & X -> X & Y */
-          /* (~X & Y) | X -> X | Y */
-          if (coden == BIT_NOT_EXPR && a1 == x)
-            {
-              gimple_assign_set_rhs_with_ops (gsi, code,
-                                              x, def2_arg2);
-              gcc_assert (gsi_stmt (*gsi) == stmt);
-              update_stmt (stmt);
-              return true;
-            }
-          defcodefor_name (def2_arg2, &coden, &a1, NULL);
-          /* (Y | ~X) & X -> X & Y */
-          /* (Y & ~X) | X -> X | Y */
-          if (coden == BIT_NOT_EXPR && a1 == x)
-            {
-              gimple_assign_set_rhs_with_ops (gsi, code,
-                                              x, def2_arg1);
-              gcc_assert (gsi_stmt (*gsi) == stmt);
-              update_stmt (stmt);
-              return true;
-            }
-        }
-
-      /* If arg1 and arg2 are booleans (or any single bit type)
-         then try to simplify:
-
-           (~X & Y) -> X < Y
-           (X & ~Y) -> Y < X
-           (~X | Y) -> X <= Y
-           (X | ~Y) -> Y <= X
-
-         But only do this if our result feeds into a comparison as
-         this transformation is not always a win, particularly on
-         targets with and-not instructions.  */
-      if (TREE_CODE (arg1) == SSA_NAME
-          && TREE_CODE (arg2) == SSA_NAME
-          && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
-          && TYPE_PRECISION (TREE_TYPE (arg1)) == 1
-          && TYPE_PRECISION (TREE_TYPE (arg2)) == 1
-          && (TYPE_UNSIGNED (TREE_TYPE (arg1))
-              == TYPE_UNSIGNED (TREE_TYPE (arg2))))
-        {
-          use_operand_p use_p;
-          gimple use_stmt;
-
-          if (single_imm_use (gimple_assign_lhs (stmt), &use_p, &use_stmt))
-            {
-              if (gimple_code (use_stmt) == GIMPLE_COND
-                  && gimple_cond_lhs (use_stmt) == gimple_assign_lhs (stmt)
-                  && integer_zerop (gimple_cond_rhs (use_stmt))
-                  && gimple_cond_code (use_stmt) == NE_EXPR)
-                {
-                  if (simplify_bitwise_binary_boolean (gsi, code, arg1, arg2))
-                    return true;
-                  if (simplify_bitwise_binary_boolean (gsi, code, arg2, arg1))
-                    return true;
-                }
-            }
-        }
-    }
-  return false;
-}
-
-
 
 /* Recognize rotation patterns.  Return true if a transformation
    applied, otherwise return false.
@@ -3760,12 +3250,8 @@ pass_forwprop::execute (function *fun)
               tree rhs1 = gimple_assign_rhs1 (stmt);
               enum tree_code code = gimple_assign_rhs_code (stmt);
 
-              if ((code == BIT_NOT_EXPR
-                   || code == NEGATE_EXPR)
-                  && TREE_CODE (rhs1) == SSA_NAME)
-                changed = simplify_not_neg_expr (&gsi);
-              else if (code == COND_EXPR
-                       || code == VEC_COND_EXPR)
+              if (code == COND_EXPR
+                  || code == VEC_COND_EXPR)
                 {
                   /* In this case the entire COND_EXPR is in rhs1. */
                   if (forward_propagate_into_cond (&gsi)
@@ -3788,10 +3274,6 @@ pass_forwprop::execute (function *fun)
                         || code == BIT_XOR_EXPR)
                        && simplify_rotate (&gsi))
                 changed = true;
-              else if (code == BIT_AND_EXPR
-                       || code == BIT_IOR_EXPR
-                       || code == BIT_XOR_EXPR)
-                changed = simplify_bitwise_binary (&gsi);
               else if (code == MULT_EXPR)
                 {
                   changed = simplify_mult (&gsi);