An alternative fix for PR70944

The transformations made by make_compound_operation apply
only to scalar integer modes.  The fix for PR70944 had enforced
that by returning early for vector modes at the top of the
function.  However, the function is supposed to be recursive,
so we should continue to look at integer suboperands even if
the outer operation is a vector one.
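To make this concrete, here is a rough sketch (not the patch itself) of
the generic operand walk at the end of make_compound_operation;
returning early for vector modes skipped this walk as well, so integer
suboperands of vector operations were never reprocessed:

  /* Illustrative sketch only: walk each rtx operand and substitute the
     simplified version.  fmt, i, next_code and new_rtx are locals of
     make_compound_operation; the real code also handles 'E' (rtvec)
     operands and ZERO_EXTEND specially.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new_rtx = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new_rtx);
      }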

This patch instead splits out the non-recursive parts
of make_compound_operation into a subroutine and checks
that the mode is a scalar integer before calling it.
The patch was originally written to help with the later
conversion to static type checking of mode classes, but it
also happened to reenable optimisation of things like
vec_duplicate operands.
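In outline the new arrangement is as follows (simplified from the
combine.c changes below; the helper also passes back the IN_CODE value
to use when recursing):

  /* Sketch of the new gate in make_compound_operation: the
     scalar-integer-only transformations live in
     make_compound_operation_int, and a null return means that there is
     nothing more to do at this level, so the caller should carry on
     and recurse into the operands of X as usual.  */
  if (SCALAR_INT_MODE_P (GET_MODE (x)))
    {
      rtx new_rtx = make_compound_operation_int (GET_MODE (x), &x,
                                                 in_code, &next_code);
      if (new_rtx)
        return new_rtx;
      code = GET_CODE (x);
    }
  /* The operand recursion that follows now runs for vector modes too.  */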

Note that the gen_lowpart calls in the PLUS and MINUS cases
were redundant, since new_rtx already had mode "mode"
at those points.
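For instance, the old PLUS handling ended with roughly the following
(abridged from the removed code); simplify_gen_binary had already
produced new_rtx in mode "mode", so the gen_lowpart call returned
new_rtx unchanged:

  new_rtx = simplify_gen_binary (PLUS, mode, tem, lhs);
  x = gen_lowpart (mode, new_rtx);  /* No-op: GET_MODE (new_rtx) == mode.  */
  goto maybe_swap;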

gcc/
2016-11-15  Richard Sandiford  <richard.sandiford@arm.com>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

	* combine.c (maybe_swap_commutative_operands): New function.
	(combine_simplify_rtx): Use it.
	(change_zero_ext): Likewise.
	(make_compound_operation_int): New function, split out of...
	(make_compound_operation): ...here.  Use
	maybe_swap_commutative_operands for both.

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r242492
commit ae1a0866a1
parent 0c012e9bf7
Richard Sandiford  2016-11-16 14:20:40 +00:00 (committed by Richard Sandiford)
2 changed files with 98 additions and 74 deletions

gcc/ChangeLog
@@ -1,3 +1,14 @@
+2016-11-16  Richard Sandiford  <richard.sandiford@arm.com>
+	    Alan Hayward  <alan.hayward@arm.com>
+	    David Sherwood  <david.sherwood@arm.com>
+
+	* combine.c (maybe_swap_commutative_operands): New function.
+	(combine_simplify_rtx): Use it.
+	(change_zero_ext): Likewise.
+	(make_compound_operation_int): New function, split out of...
+	(make_compound_operation): ...here.  Use
+	maybe_swap_commutative_operands for both.
+
 2016-11-16  Richard Earnshaw  <rearnsha@arm.com>
 
 	* arm/arm-fpus.def (vfpv2): New FPU, currently an alias for 'vfp'.

gcc/combine.c
@@ -5479,6 +5479,21 @@ subst (rtx x, rtx from, rtx to, int in_dest, int in_cond, int unique_copy)
return x;
}
/* If X is a commutative operation whose operands are not in the canonical
order, use substitutions to swap them. */
static void
maybe_swap_commutative_operands (rtx x)
{
if (COMMUTATIVE_ARITH_P (x)
&& swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
{
rtx temp = XEXP (x, 0);
SUBST (XEXP (x, 0), XEXP (x, 1));
SUBST (XEXP (x, 1), temp);
}
}
/* Simplify X, a piece of RTL. We just operate on the expression at the
outer level; call `subst' to simplify recursively. Return the new
expression.
@@ -5498,13 +5513,7 @@ combine_simplify_rtx (rtx x, machine_mode op0_mode, int in_dest,
/* If this is a commutative operation, put a constant last and a complex
expression first. We don't need to do this for comparisons here. */
if (COMMUTATIVE_ARITH_P (x)
&& swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
{
temp = XEXP (x, 0);
SUBST (XEXP (x, 0), XEXP (x, 1));
SUBST (XEXP (x, 1), temp);
}
maybe_swap_commutative_operands (x);
/* Try to fold this expression in case we have constants that weren't
present before. */
@@ -7744,55 +7753,38 @@ extract_left_shift (rtx x, int count)
return 0;
}
/* Look at the expression rooted at X. Look for expressions
equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
Form these expressions.
/* Subroutine of make_compound_operation. *X_PTR is the rtx at the current
level of the expression and MODE is its mode. IN_CODE is as for
make_compound_operation. *NEXT_CODE_PTR is the value of IN_CODE
that should be used when recursing on operands of *X_PTR.
Return the new rtx, usually just X.
There are two possible actions:
Also, for machines like the VAX that don't have logical shift insns,
try to convert logical to arithmetic shift operations in cases where
they are equivalent. This undoes the canonicalizations to logical
shifts done elsewhere.
- Return null. This tells the caller to recurse on *X_PTR with IN_CODE
equal to *NEXT_CODE_PTR, after which *X_PTR holds the final value.
We try, as much as possible, to re-use rtl expressions to save memory.
- Return a new rtx, which the caller returns directly. */
IN_CODE says what kind of expression we are processing. Normally, it is
SET. In a memory address it is MEM. When processing the arguments of
a comparison or a COMPARE against zero, it is COMPARE, or EQ if more
precisely it is an equality comparison against zero. */
rtx
make_compound_operation (rtx x, enum rtx_code in_code)
static rtx
make_compound_operation_int (machine_mode mode, rtx *x_ptr,
enum rtx_code in_code,
enum rtx_code *next_code_ptr)
{
rtx x = *x_ptr;
enum rtx_code next_code = *next_code_ptr;
enum rtx_code code = GET_CODE (x);
machine_mode mode = GET_MODE (x);
int mode_width = GET_MODE_PRECISION (mode);
rtx rhs, lhs;
enum rtx_code next_code;
int i, j;
rtx new_rtx = 0;
int i;
rtx tem;
const char *fmt;
bool equality_comparison = false;
/* PR rtl-optimization/70944. */
if (VECTOR_MODE_P (mode))
return x;
/* Select the code to be used in recursive calls. Once we are inside an
address, we stay there. If we have a comparison, set to COMPARE,
but once inside, go back to our default of SET. */
if (in_code == EQ)
{
equality_comparison = true;
in_code = COMPARE;
}
next_code = (code == MEM ? MEM
: ((code == COMPARE || COMPARISON_P (x))
&& XEXP (x, 1) == const0_rtx) ? COMPARE
: in_code == COMPARE ? SET : in_code);
/* Process depending on the code of this operation. If NEW is set
nonzero, it will be returned. */
@@ -7804,8 +7796,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
an address. */
if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
&& INTVAL (XEXP (x, 1)) >= 0
&& SCALAR_INT_MODE_P (mode))
&& INTVAL (XEXP (x, 1)) >= 0)
{
HOST_WIDE_INT count = INTVAL (XEXP (x, 1));
HOST_WIDE_INT multval = HOST_WIDE_INT_1 << count;
@@ -7826,8 +7817,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
rhs = XEXP (x, 1);
lhs = make_compound_operation (lhs, next_code);
rhs = make_compound_operation (rhs, next_code);
if (GET_CODE (lhs) == MULT && GET_CODE (XEXP (lhs, 0)) == NEG
&& SCALAR_INT_MODE_P (mode))
if (GET_CODE (lhs) == MULT && GET_CODE (XEXP (lhs, 0)) == NEG)
{
tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (lhs, 0), 0),
XEXP (lhs, 1));
@@ -7846,22 +7836,20 @@ make_compound_operation (rtx x, enum rtx_code in_code)
{
SUBST (XEXP (x, 0), lhs);
SUBST (XEXP (x, 1), rhs);
goto maybe_swap;
}
x = gen_lowpart (mode, new_rtx);
goto maybe_swap;
maybe_swap_commutative_operands (x);
return x;
case MINUS:
lhs = XEXP (x, 0);
rhs = XEXP (x, 1);
lhs = make_compound_operation (lhs, next_code);
rhs = make_compound_operation (rhs, next_code);
if (GET_CODE (rhs) == MULT && GET_CODE (XEXP (rhs, 0)) == NEG
&& SCALAR_INT_MODE_P (mode))
if (GET_CODE (rhs) == MULT && GET_CODE (XEXP (rhs, 0)) == NEG)
{
tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (rhs, 0), 0),
XEXP (rhs, 1));
new_rtx = simplify_gen_binary (PLUS, mode, tem, lhs);
return simplify_gen_binary (PLUS, mode, tem, lhs);
}
else if (GET_CODE (rhs) == MULT
&& (CONST_INT_P (XEXP (rhs, 1)) && INTVAL (XEXP (rhs, 1)) < 0))
@@ -7870,7 +7858,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
simplify_gen_unary (NEG, mode,
XEXP (rhs, 1),
mode));
new_rtx = simplify_gen_binary (PLUS, mode, tem, lhs);
return simplify_gen_binary (PLUS, mode, tem, lhs);
}
else
{
@@ -7878,7 +7866,6 @@ make_compound_operation (rtx x, enum rtx_code in_code)
SUBST (XEXP (x, 1), rhs);
return x;
}
return gen_lowpart (mode, new_rtx);
case AND:
/* If the second operand is not a constant, we can't do anything
@@ -8171,15 +8158,60 @@ make_compound_operation (rtx x, enum rtx_code in_code)
}
if (new_rtx)
*x_ptr = gen_lowpart (mode, new_rtx);
*next_code_ptr = next_code;
return NULL_RTX;
}
/* Look at the expression rooted at X. Look for expressions
equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
Form these expressions.
Return the new rtx, usually just X.
Also, for machines like the VAX that don't have logical shift insns,
try to convert logical to arithmetic shift operations in cases where
they are equivalent. This undoes the canonicalizations to logical
shifts done elsewhere.
We try, as much as possible, to re-use rtl expressions to save memory.
IN_CODE says what kind of expression we are processing. Normally, it is
SET. In a memory address it is MEM. When processing the arguments of
a comparison or a COMPARE against zero, it is COMPARE, or EQ if more
precisely it is an equality comparison against zero. */
rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
enum rtx_code code = GET_CODE (x);
const char *fmt;
int i, j;
enum rtx_code next_code;
rtx new_rtx, tem;
/* Select the code to be used in recursive calls. Once we are inside an
address, we stay there. If we have a comparison, set to COMPARE,
but once inside, go back to our default of SET. */
next_code = (code == MEM ? MEM
: ((code == COMPARE || COMPARISON_P (x))
&& XEXP (x, 1) == const0_rtx) ? COMPARE
: in_code == COMPARE || in_code == EQ ? SET : in_code);
if (SCALAR_INT_MODE_P (GET_MODE (x)))
{
x = gen_lowpart (mode, new_rtx);
rtx new_rtx = make_compound_operation_int (GET_MODE (x), &x,
in_code, &next_code);
if (new_rtx)
return new_rtx;
code = GET_CODE (x);
}
/* Now recursively process each operand of this operation. We need to
handle ZERO_EXTEND specially so that we don't lose track of the
inner mode. */
if (GET_CODE (x) == ZERO_EXTEND)
if (code == ZERO_EXTEND)
{
new_rtx = make_compound_operation (XEXP (x, 0), next_code);
tem = simplify_const_unary_operation (ZERO_EXTEND, GET_MODE (x),
@@ -8204,17 +8236,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
SUBST (XVECEXP (x, i, j), new_rtx);
}
maybe_swap:
/* If this is a commutative operation, the changes to the operands
may have made it noncanonical. */
if (COMMUTATIVE_ARITH_P (x)
&& swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
{
tem = XEXP (x, 0);
SUBST (XEXP (x, 0), XEXP (x, 1));
SUBST (XEXP (x, 1), tem);
}
maybe_swap_commutative_operands (x);
return x;
}
@@ -11220,16 +11242,7 @@ change_zero_ext (rtx pat)
if (changed)
FOR_EACH_SUBRTX_PTR (iter, array, src, NONCONST)
{
rtx x = **iter;
if (COMMUTATIVE_ARITH_P (x)
&& swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
{
rtx tem = XEXP (x, 0);
SUBST (XEXP (x, 0), XEXP (x, 1));
SUBST (XEXP (x, 1), tem);
}
}
maybe_swap_commutative_operands (**iter);
rtx *dst = &SET_DEST (pat);
if (GET_CODE (*dst) == ZERO_EXTRACT