tree-vrp.c (simplify_internal_call_using_ranges): If only one range is range_int_cst_p...

* tree-vrp.c (simplify_internal_call_using_ranges): If only
	one range is range_int_cst_p, but not both, at least optimize
	addition/subtraction of 0 and multiplication by 0 or 1.
	* gimple-fold.c (gimple_fold_call): Fold
	IFN_UBSAN_CHECK_{ADD,SUB,MUL}.
	(gimple_fold_stmt_to_constant_1): If both op0 and op1 aren't
	INTEGER_CSTs, try to fold at least x * 0 and y - y.

From-SVN: r208840
Jakub Jelinek 2014-03-26 10:17:10 +01:00 committed by Jakub Jelinek
parent ee0cff6049
commit 368b454d67
3 changed files with 124 additions and 27 deletions
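
For illustration (a sketch, not part of the commit; the file and function names below are invented), with -fsanitize=signed-integer-overflow signed +, - and * are emitted as IFN_UBSAN_CHECK_{ADD,SUB,MUL} internal calls, and the folds added here can drop the checks once inlining or constant propagation exposes a 0 or 1 operand:

/* t.c: compile with
     gcc -O2 -fsanitize=signed-integer-overflow -fdump-tree-optimized t.c
   The optimized dump should show plain copies/constants for f1-f4,
   with no overflow checks left behind.  */

static inline int add (int a, int b) { return a + b; }
static inline int sub (int a, int b) { return a - b; }
static inline int mul (int a, int b) { return a * b; }

int f1 (int y) { return add (y, 0); }	/* x = y + 0 -> y  */
int f2 (int y) { return sub (y, y); }	/* x = y - y -> 0  */
int f3 (int y) { return mul (y, 1); }	/* x = y * 1 -> y  */
int f4 (int y) { return mul (y, 0); }	/* x = y * 0 -> 0  */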

gcc/ChangeLog

@@ -1,3 +1,13 @@
2014-03-26  Jakub Jelinek  <jakub@redhat.com>

	* tree-vrp.c (simplify_internal_call_using_ranges): If only
	one range is range_int_cst_p, but not both, at least optimize
	addition/subtraction of 0 and multiplication by 0 or 1.
	* gimple-fold.c (gimple_fold_call): Fold
	IFN_UBSAN_CHECK_{ADD,SUB,MUL}.
	(gimple_fold_stmt_to_constant_1): If both op0 and op1 aren't
	INTEGER_CSTs, try to fold at least x * 0 and y - y.

2014-03-26  Eric Botcazou  <ebotcazou@adacore.com>

	PR rtl-optimization/60452
gcc/gimple-fold.c

@@ -1186,13 +1186,56 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
changed |= targetm.gimple_fold_builtin (gsi);
}
else if (gimple_call_internal_p (stmt)
&& gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)
else if (gimple_call_internal_p (stmt))
{
tree result = fold_builtin_expect (gimple_location (stmt),
gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
gimple_call_arg (stmt, 2));
enum tree_code subcode = ERROR_MARK;
tree result = NULL_TREE;
switch (gimple_call_internal_fn (stmt))
{
case IFN_BUILTIN_EXPECT:
result = fold_builtin_expect (gimple_location (stmt),
gimple_call_arg (stmt, 0),
gimple_call_arg (stmt, 1),
gimple_call_arg (stmt, 2));
break;
case IFN_UBSAN_CHECK_ADD:
subcode = PLUS_EXPR;
break;
case IFN_UBSAN_CHECK_SUB:
subcode = MINUS_EXPR;
break;
case IFN_UBSAN_CHECK_MUL:
subcode = MULT_EXPR;
break;
default:
break;
}
if (subcode != ERROR_MARK)
{
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
/* x = y + 0; x = y - 0; x = y * 0; */
if (integer_zerop (arg1))
result = subcode == MULT_EXPR
? build_zero_cst (TREE_TYPE (arg0))
: arg0;
/* x = 0 + y; x = 0 * y; */
else if (subcode != MINUS_EXPR && integer_zerop (arg0))
result = subcode == MULT_EXPR
? build_zero_cst (TREE_TYPE (arg0))
: arg1;
/* x = y - y; */
else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
result = build_zero_cst (TREE_TYPE (arg0));
/* x = y * 1; x = 1 * y; */
else if (subcode == MULT_EXPR)
{
if (integer_onep (arg1))
result = arg0;
else if (integer_onep (arg0))
result = arg1;
}
}
if (result)
{
if (!update_call_from_tree (gsi, result))
@@ -2688,15 +2731,32 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
default:
return NULL_TREE;
}
tree op0 = (*valueize) (gimple_call_arg (stmt, 0));
tree op1 = (*valueize) (gimple_call_arg (stmt, 1));
tree arg0 = gimple_call_arg (stmt, 0);
tree arg1 = gimple_call_arg (stmt, 1);
tree op0 = (*valueize) (arg0);
tree op1 = (*valueize) (arg1);
if (TREE_CODE (op0) != INTEGER_CST
|| TREE_CODE (op1) != INTEGER_CST)
return NULL_TREE;
tree res = fold_binary_loc (loc, subcode,
TREE_TYPE (gimple_call_arg (stmt, 0)),
op0, op1);
{
switch (subcode)
{
case MULT_EXPR:
/* x * 0 = 0 * x = 0 without overflow. */
if (integer_zerop (op0) || integer_zerop (op1))
return build_zero_cst (TREE_TYPE (arg0));
break;
case MINUS_EXPR:
/* y - y = 0 without overflow. */
if (operand_equal_p (op0, op1, 0))
return build_zero_cst (TREE_TYPE (arg0));
break;
default:
break;
}
}
tree res
= fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
if (res
&& TREE_CODE (res) == INTEGER_CST
&& !TREE_OVERFLOW (res))
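
The two gimple-fold.c changes work at different levels: gimple_fold_call rewrites the IFN_UBSAN_CHECK_{ADD,SUB,MUL} call itself when one operand makes the result trivial, while gimple_fold_stmt_to_constant_1 lets constant propagation treat the result as known even though the operands are not both INTEGER_CSTs (only x * 0, 0 * x and y - y qualify there, because their value is fixed no matter what the other operand is). The helper below is a standalone sketch of that fold table, not GCC internals; the enum and function names are invented:

#include <stdbool.h>

enum op { OP_ADD, OP_SUB, OP_MUL };
enum fold_result { FOLD_NONE, FOLD_TO_X, FOLD_TO_Y, FOLD_TO_ZERO };

/* How UBSAN_CHECK_<op> (x, y) folds, given what is known about the
   operands.  Mirrors the case analysis added to gimple_fold_call;
   none of the returned folds can overflow, so the check is dropped.  */
static enum fold_result
ubsan_check_fold (enum op op, bool x_is_0, bool x_is_1,
		  bool y_is_0, bool y_is_1, bool x_equals_y)
{
  if (y_is_0)				/* x + 0, x - 0, x * 0  */
    return op == OP_MUL ? FOLD_TO_ZERO : FOLD_TO_X;
  if (x_is_0 && op != OP_SUB)		/* 0 + y, 0 * y  */
    return op == OP_MUL ? FOLD_TO_ZERO : FOLD_TO_Y;
  if (op == OP_SUB && x_equals_y)	/* y - y  */
    return FOLD_TO_ZERO;
  if (op == OP_MUL && y_is_1)		/* x * 1  */
    return FOLD_TO_X;
  if (op == OP_MUL && x_is_1)		/* 1 * y  */
    return FOLD_TO_Y;
  return FOLD_NONE;
}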

gcc/tree-vrp.c

@@ -9336,31 +9336,58 @@ simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
else if (TREE_CODE (op0) == INTEGER_CST)
set_value_range_to_value (&vr0, op0, NULL);
else
return false;
set_value_range_to_varying (&vr0);
if (TREE_CODE (op1) == SSA_NAME)
vr1 = *get_value_range (op1);
else if (TREE_CODE (op1) == INTEGER_CST)
set_value_range_to_value (&vr1, op1, NULL);
else
return false;
set_value_range_to_varying (&vr1);
if (!range_int_cst_p (&vr0) || !range_int_cst_p (&vr1))
return false;
tree r1 = int_const_binop (subcode, vr0.min, vr1.min);
tree r2 = int_const_binop (subcode, vr0.max, vr1.max);
if (r1 == NULL_TREE || TREE_OVERFLOW (r1)
|| r2 == NULL_TREE || TREE_OVERFLOW (r2))
return false;
if (subcode == MULT_EXPR)
if (!range_int_cst_p (&vr0))
{
tree r3 = int_const_binop (subcode, vr0.min, vr1.max);
tree r4 = int_const_binop (subcode, vr0.max, vr1.min);
if (r3 == NULL_TREE || TREE_OVERFLOW (r3)
|| r4 == NULL_TREE || TREE_OVERFLOW (r4))
/* If one range is VR_ANTI_RANGE, VR_VARYING etc.,
optimize at least x = y + 0; x = y - 0; x = y * 0;
and x = y * 1; which never overflow. */
if (!range_int_cst_p (&vr1))
return false;
if (tree_int_cst_sgn (vr1.min) == -1)
return false;
if (compare_tree_int (vr1.max, subcode == MULT_EXPR) == 1)
return false;
}
else if (!range_int_cst_p (&vr1))
{
/* If one range is VR_ANTI_RANGE, VR_VARYING etc.,
optimize at least x = 0 + y; x = 0 * y; and x = 1 * y;
which never overflow. */
if (subcode == MINUS_EXPR)
return false;
if (!range_int_cst_p (&vr0))
return false;
if (tree_int_cst_sgn (vr0.min) == -1)
return false;
if (compare_tree_int (vr0.max, subcode == MULT_EXPR) == 1)
return false;
}
else
{
tree r1 = int_const_binop (subcode, vr0.min, vr1.min);
tree r2 = int_const_binop (subcode, vr0.max, vr1.max);
if (r1 == NULL_TREE || TREE_OVERFLOW (r1)
|| r2 == NULL_TREE || TREE_OVERFLOW (r2))
return false;
if (subcode == MULT_EXPR)
{
tree r3 = int_const_binop (subcode, vr0.min, vr1.max);
tree r4 = int_const_binop (subcode, vr0.max, vr1.min);
if (r3 == NULL_TREE || TREE_OVERFLOW (r3)
|| r4 == NULL_TREE || TREE_OVERFLOW (r4))
return false;
}
}
gimple g = gimple_build_assign_with_ops (subcode, gimple_call_lhs (stmt),
op0, op1);
gsi_replace (gsi, g, false);
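
The new one-sided case reads as follows: when only one operand has a usable constant range, the check can still be replaced by a plain PLUS_EXPR/MINUS_EXPR/MULT_EXPR assignment provided that known range lies within [0, 0] for addition and subtraction or [0, 1] for multiplication (compare_tree_int (..., subcode == MULT_EXPR) encodes that upper bound, since the comparison constant evaluates to 1 for MULT_EXPR and 0 otherwise), and for MINUS_EXPR the operand with the unknown range must be the first one, because 0 - y can still overflow. A standalone sketch of that bound test, with invented names and plain integers standing in for the tree bounds:

#include <stdbool.h>

/* Return true if an operand whose value range is [min, max] makes the
   operation immune to overflow regardless of the other operand:
   [0, 0] for + and -, [0, 1] for *.  Mirrors the tree_int_cst_sgn and
   compare_tree_int checks above.  */
static bool
one_sided_range_ok (bool is_mult, long long min, long long max)
{
  long long hi = is_mult ? 1 : 0;
  return min >= 0 && max <= hi;
}

For example, one_sided_range_ok (true, 0, 1) is true, so UBSAN_CHECK_MUL (x, y) with y known to be in [0, 1] is rewritten to x * y even though nothing is known about x.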