[28/77] Use is_a <scalar_int_mode> for miscellaneous types of test

This patch adds is_a <scalar_int_mode> checks to various places
that were already explicitly or implicitly restricted to integers,
in cases where adding an explicit is_a <scalar_int_mode> check is
useful for later patches.
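
As background, the following is a minimal, self-contained sketch of
the checked-conversion idiom this series relies on.  The types here
are hypothetical stand-ins, not GCC's real machmode.h classes: a
general mode value is converted to a stricter scalar-integer handle
only after a runtime test, so integer-only queries cannot be reached
with other mode classes.

  #include <cstdio>

  /* Hypothetical, simplified stand-ins for GCC's mode types.  */
  enum mode_class { MODE_INT, MODE_FLOAT };
  struct machine_mode_t { mode_class mclass; unsigned precision; };
  struct scalar_int_mode_t { unsigned precision; };

  /* The checked conversion: succeeds only for scalar integer modes,
     filling in the stricter handle as a side effect, in the style of
     is_a <scalar_int_mode> (mode, &int_mode).  */
  static bool
  is_a_scalar_int_mode (machine_mode_t mode, scalar_int_mode_t *result)
  {
    if (mode.mclass != MODE_INT)
      return false;
    result->precision = mode.precision;
    return true;
  }

  int
  main ()
  {
    machine_mode_t modes[] = { { MODE_INT, 32 }, { MODE_FLOAT, 64 } };
    for (machine_mode_t mode : modes)
      {
        scalar_int_mode_t int_mode;
        if (is_a_scalar_int_mode (mode, &int_mode))
          /* Integer-only queries happen behind the check.  */
          std::printf ("scalar int mode, precision %u\n",
                       int_mode.precision);
        else
          std::printf ("not a scalar integer mode\n");
      }
    return 0;
  }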

In simplify_if_then_else, the:

  GET_MODE (XEXP (XEXP (t, 0), N))

expressions were equivalent to:

  GET_MODE (XEXP (t, 0))

because XEXP (t, 0) is a binary arithmetic operation (PLUS, MINUS,
IOR, etc.) whose operands necessarily share the mode of the operation
itself.
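
To make that equivalence concrete, here is a toy, self-contained
model of an RTL expression tree (illustrative only; none of these
types are GCC's): in a well-formed (sign_extend:SI (plus:HI x y)),
the mode of either PLUS operand is the mode of the PLUS itself.

  #include <cassert>

  enum mode { HImode, SImode };
  enum code { REG, PLUS, SIGN_EXTEND };

  /* A toy expression node: every rtx carries its own mode.  */
  struct rtx_t
  {
    code c;
    mode m;
    const rtx_t *op0, *op1;
  };

  int
  main ()
  {
    rtx_t x = { REG, HImode, nullptr, nullptr };
    rtx_t y = { REG, HImode, nullptr, nullptr };
    /* Well-formed binary arithmetic: operands share the operation's
       mode, so (plus:HI x y) has HImode operands.  */
    rtx_t sum = { PLUS, HImode, &x, &y };
    rtx_t ext = { SIGN_EXTEND, SImode, &sum, nullptr };

    /* The equivalence used above: GET_MODE (XEXP (XEXP (t, 0), N))
       equals GET_MODE (XEXP (t, 0)) for N = 0 and N = 1.  */
    assert (ext.op0->op0->m == ext.op0->m);
    assert (ext.op0->op1->m == ext.op0->m);
    return 0;
  }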

2017-08-30  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* combine.c (sign_extend_short_imm): Add is_a <scalar_int_mode>
	checks.
	(try_combine): Likewise.
	(simplify_if_then_else): Likewise.
	* cse.c (cse_insn): Likewise.
	* dwarf2out.c (mem_loc_descriptor): Likewise.
	* emit-rtl.c (gen_lowpart_common): Likewise.
	* simplify-rtx.c (simplify_truncation): Likewise.
	(simplify_binary_operation_1): Likewise.
	(simplify_const_relational_operation): Likewise.
	(simplify_ternary_operation): Likewise.
	* tree-ssa-loop-ivopts.c (force_expr_to_var_cost): Likewise.

Co-Authored-By: Alan Hayward <alan.hayward@arm.com>
Co-Authored-By: David Sherwood <david.sherwood@arm.com>

From-SVN: r251480

--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,20 @@
+2017-08-30  Richard Sandiford  <richard.sandiford@linaro.org>
+	    Alan Hayward  <alan.hayward@arm.com>
+	    David Sherwood  <david.sherwood@arm.com>
+
+	* combine.c (sign_extend_short_imm): Add is_a <scalar_int_mode>
+	checks.
+	(try_combine): Likewise.
+	(simplify_if_then_else): Likewise.
+	* cse.c (cse_insn): Likewise.
+	* dwarf2out.c (mem_loc_descriptor): Likewise.
+	* emit-rtl.c (gen_lowpart_common): Likewise.
+	* simplify-rtx.c (simplify_truncation): Likewise.
+	(simplify_binary_operation_1): Likewise.
+	(simplify_const_relational_operation): Likewise.
+	(simplify_ternary_operation): Likewise.
+	* tree-ssa-loop-ivopts.c (force_expr_to_var_cost): Likewise.
+
 2017-08-30  Richard Sandiford  <richard.sandiford@linaro.org>
 	    Alan Hayward  <alan.hayward@arm.com>
 	    David Sherwood  <david.sherwood@arm.com>

--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -1636,11 +1636,13 @@ setup_incoming_promotions (rtx_insn *first)
 static rtx
 sign_extend_short_imm (rtx src, machine_mode mode, unsigned int prec)
 {
-  if (GET_MODE_PRECISION (mode) < prec
-      && CONST_INT_P (src)
+  scalar_int_mode int_mode;
+  if (CONST_INT_P (src)
+      && is_a <scalar_int_mode> (mode, &int_mode)
+      && GET_MODE_PRECISION (int_mode) < prec
       && INTVAL (src) > 0
-      && val_signbit_known_set_p (mode, INTVAL (src)))
-    src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (mode));
+      && val_signbit_known_set_p (int_mode, INTVAL (src)))
+    src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (int_mode));
 
   return src;
 }
@@ -3166,6 +3168,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
 	  rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1);
 	  machine_mode compare_mode, orig_compare_mode;
 	  enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;
+	  scalar_int_mode mode;
 
 	  newpat = PATTERN (i3);
 	  newpat_dest = SET_DEST (newpat);
@@ -3176,8 +3179,9 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
 					     &cc_use_insn)))
 	    {
 	      compare_code = orig_compare_code = GET_CODE (*cc_use_loc);
-	      compare_code = simplify_compare_const (compare_code,
-						     GET_MODE (i2dest), op0, &op1);
+	      if (is_a <scalar_int_mode> (GET_MODE (i2dest), &mode))
+		compare_code = simplify_compare_const (compare_code, mode,
+						       op0, &op1);
 	      target_canonicalize_comparison (&compare_code, &op0, &op1, 1);
 	    }
@@ -6288,7 +6292,7 @@ simplify_if_then_else (rtx x)
   int i;
   enum rtx_code false_code;
   rtx reversed;
-  scalar_int_mode int_mode;
+  scalar_int_mode int_mode, inner_mode;
 
   /* Simplify storing of the truth value.  */
   if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
@@ -6500,6 +6504,7 @@ simplify_if_then_else (rtx x)
 	       && rtx_equal_p (XEXP (t, 1), f))
 	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
       else if (GET_CODE (t) == SIGN_EXTEND
+	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
 	       && (GET_CODE (XEXP (t, 0)) == PLUS
 		   || GET_CODE (XEXP (t, 0)) == MINUS
 		   || GET_CODE (XEXP (t, 0)) == IOR
@@ -6513,13 +6518,14 @@ simplify_if_then_else (rtx x)
 	       && (num_sign_bit_copies (f, GET_MODE (f))
 		   > (unsigned int)
 		     (GET_MODE_PRECISION (int_mode)
-		      - GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (t, 0), 0))))))
+		      - GET_MODE_PRECISION (inner_mode))))
 	{
 	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
 	  extend_op = SIGN_EXTEND;
-	  m = GET_MODE (XEXP (t, 0));
+	  m = inner_mode;
 	}
       else if (GET_CODE (t) == SIGN_EXTEND
+	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
 	       && (GET_CODE (XEXP (t, 0)) == PLUS
 		   || GET_CODE (XEXP (t, 0)) == IOR
 		   || GET_CODE (XEXP (t, 0)) == XOR)
@@ -6529,13 +6535,14 @@ simplify_if_then_else (rtx x)
 	       && (num_sign_bit_copies (f, GET_MODE (f))
 		   > (unsigned int)
 		     (GET_MODE_PRECISION (int_mode)
-		      - GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (t, 0), 1))))))
+		      - GET_MODE_PRECISION (inner_mode))))
 	{
 	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
 	  extend_op = SIGN_EXTEND;
-	  m = GET_MODE (XEXP (t, 0));
+	  m = inner_mode;
 	}
       else if (GET_CODE (t) == ZERO_EXTEND
+	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
 	       && (GET_CODE (XEXP (t, 0)) == PLUS
 		   || GET_CODE (XEXP (t, 0)) == MINUS
 		   || GET_CODE (XEXP (t, 0)) == IOR
@@ -6548,14 +6555,15 @@ simplify_if_then_else (rtx x)
 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
 	       && ((nonzero_bits (f, GET_MODE (f))
-		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
+		    & ~GET_MODE_MASK (inner_mode))
 		   == 0))
 	{
 	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
 	  extend_op = ZERO_EXTEND;
-	  m = GET_MODE (XEXP (t, 0));
+	  m = inner_mode;
 	}
       else if (GET_CODE (t) == ZERO_EXTEND
+	       && is_a <scalar_int_mode> (GET_MODE (XEXP (t, 0)), &inner_mode)
 	       && (GET_CODE (XEXP (t, 0)) == PLUS
 		   || GET_CODE (XEXP (t, 0)) == IOR
 		   || GET_CODE (XEXP (t, 0)) == XOR)
@@ -6564,12 +6572,12 @@ simplify_if_then_else (rtx x)
 	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
 	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
 	       && ((nonzero_bits (f, GET_MODE (f))
-		    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
+		    & ~GET_MODE_MASK (inner_mode))
 		   == 0))
 	{
 	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
 	  extend_op = ZERO_EXTEND;
-	  m = GET_MODE (XEXP (t, 0));
+	  m = inner_mode;
 	}
 
       if (z)
@@ -6613,17 +6621,17 @@ simplify_if_then_else (rtx x)
      non-zero bit in A is C1.  */
   if (true_code == NE && XEXP (cond, 1) == const0_rtx
       && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
-      && INTEGRAL_MODE_P (GET_MODE (XEXP (cond, 0)))
-      && (UINTVAL (true_rtx) & GET_MODE_MASK (mode))
-	 == nonzero_bits (XEXP (cond, 0), GET_MODE (XEXP (cond, 0)))
-      && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
+      && is_a <scalar_int_mode> (mode, &int_mode)
+      && is_a <scalar_int_mode> (GET_MODE (XEXP (cond, 0)), &inner_mode)
+      && (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))
+	 == nonzero_bits (XEXP (cond, 0), inner_mode)
+      && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (int_mode))) >= 0)
     {
       rtx val = XEXP (cond, 0);
-      machine_mode val_mode = GET_MODE (val);
-      if (val_mode == mode)
+      if (inner_mode == int_mode)
 	return val;
-      else if (GET_MODE_PRECISION (val_mode) < GET_MODE_PRECISION (mode))
-	return simplify_gen_unary (ZERO_EXTEND, mode, val, val_mode);
+      else if (GET_MODE_PRECISION (inner_mode) < GET_MODE_PRECISION (int_mode))
+	return simplify_gen_unary (ZERO_EXTEND, int_mode, val, inner_mode);
     }
 
   return x;

--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -4878,13 +4878,14 @@ cse_insn (rtx_insn *insn)
 	 value.  */
 
       if (flag_expensive_optimizations && ! src_related
+	  && is_a <scalar_int_mode> (mode, &int_mode)
 	  && GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
-	  && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
+	  && GET_MODE_SIZE (int_mode) < UNITS_PER_WORD)
 	{
 	  machine_mode tmode;
 	  rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
 
-	  FOR_EACH_WIDER_MODE (tmode, mode)
+	  FOR_EACH_WIDER_MODE (tmode, int_mode)
 	    {
 	      if (GET_MODE_SIZE (tmode) > UNITS_PER_WORD)
 		break;
@@ -4905,7 +4906,7 @@ cse_insn (rtx_insn *insn)
 	      if (REG_P (larger_elt->exp))
 		{
 		  src_related
-		    = gen_lowpart (mode, larger_elt->exp);
+		    = gen_lowpart (int_mode, larger_elt->exp);
 		  break;
 		}

--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -14824,31 +14824,29 @@ mem_loc_descriptor (rtx rtl, machine_mode mode,
     case SIGN_EXTEND:
     case ZERO_EXTEND:
-      if (!is_a <scalar_int_mode> (mode, &int_mode))
+      if (!is_a <scalar_int_mode> (mode, &int_mode)
+	  || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
 	break;
-      op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
+      op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
 				mem_mode, VAR_INIT_STATUS_INITIALIZED);
       if (op0 == 0)
 	break;
       else if (GET_CODE (rtl) == ZERO_EXTEND
 	       && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
-	       && GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
-		  < HOST_BITS_PER_WIDE_INT
+	       && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
 	       /* If DW_OP_const{1,2,4}u won't be used, it is shorter
 		  to expand zero extend as two shifts instead of
 		  masking.  */
-	       && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= 4)
+	       && GET_MODE_SIZE (inner_mode) <= 4)
 	{
-	  machine_mode imode = GET_MODE (XEXP (rtl, 0));
 	  mem_loc_result = op0;
 	  add_loc_descr (&mem_loc_result,
-			 int_loc_descriptor (GET_MODE_MASK (imode)));
+			 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
 	  add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
 	}
       else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
 	{
-	  int shift = DWARF2_ADDR_SIZE
-		      - GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0)));
+	  int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
 	  shift *= BITS_PER_UNIT;
 	  if (GET_CODE (rtl) == SIGN_EXTEND)
 	    op = DW_OP_shra;
@@ -14865,7 +14863,7 @@ mem_loc_descriptor (rtx rtl, machine_mode mode,
 	  dw_die_ref type_die1, type_die2;
 	  dw_loc_descr_ref cvt;
 
-	  type_die1 = base_type_for_mode (GET_MODE (XEXP (rtl, 0)),
+	  type_die1 = base_type_for_mode (inner_mode,
 					  GET_CODE (rtl) == ZERO_EXTEND);
 	  if (type_die1 == NULL)
 	    break;
@@ -15410,14 +15408,15 @@ mem_loc_descriptor (rtx rtl, machine_mode mode,
       if (CONST_INT_P (XEXP (rtl, 1))
 	  && CONST_INT_P (XEXP (rtl, 2))
 	  && is_a <scalar_int_mode> (mode, &int_mode)
+	  && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
+	  && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
+	  && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
 	  && ((unsigned) INTVAL (XEXP (rtl, 1))
 	      + (unsigned) INTVAL (XEXP (rtl, 2))
-	      <= GET_MODE_BITSIZE (int_mode))
-	  && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
-	  && GET_MODE_SIZE (GET_MODE (XEXP (rtl, 0))) <= DWARF2_ADDR_SIZE)
+	      <= GET_MODE_BITSIZE (int_mode)))
 	{
 	  int shift, size;
-	  op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
+	  op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
 				    mem_mode, VAR_INIT_STATUS_INITIALIZED);
 	  if (op0 == 0)
 	    break;
@@ -15429,8 +15428,7 @@ mem_loc_descriptor (rtx rtl, machine_mode mode,
 	  size = INTVAL (XEXP (rtl, 1));
 	  shift = INTVAL (XEXP (rtl, 2));
 	  if (BITS_BIG_ENDIAN)
-	    shift = GET_MODE_BITSIZE (GET_MODE (XEXP (rtl, 0)))
-		    - shift - size;
+	    shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
 	  if (shift + size != (int) DWARF2_ADDR_SIZE)
 	    {
 	      add_loc_descr (&mem_loc_result,

--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -1430,9 +1430,11 @@ gen_lowpart_common (machine_mode mode, rtx x)
   if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
     return 0;
 
+  scalar_int_mode int_mode, int_innermode, from_mode;
   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
-      && (GET_MODE_CLASS (mode) == MODE_INT
-	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
+      && is_a <scalar_int_mode> (mode, &int_mode)
+      && is_a <scalar_int_mode> (innermode, &int_innermode)
+      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
     {
       /* If we are getting the low-order part of something that has been
 	 sign- or zero-extended, we can either just use the object being
@@ -1442,12 +1444,12 @@ gen_lowpart_common (machine_mode mode, rtx x)
 
 	 This case is used mostly by combine and cse.  */
 
-      if (GET_MODE (XEXP (x, 0)) == mode)
+      if (from_mode == int_mode)
 	return XEXP (x, 0);
-      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
-	return gen_lowpart_common (mode, XEXP (x, 0));
-      else if (msize < xsize)
-	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
+      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
+	return gen_lowpart_common (int_mode, XEXP (x, 0));
+      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
+	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
     }
   else if (GET_CODE (x) == SUBREG || REG_P (x)
 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR

--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -808,21 +808,22 @@ simplify_truncation (machine_mode mode, rtx op,
      if the MEM has a mode-dependent address.  */
   if ((GET_CODE (op) == LSHIFTRT
        || GET_CODE (op) == ASHIFTRT)
+      && is_a <scalar_int_mode> (mode, &int_mode)
       && is_a <scalar_int_mode> (op_mode, &int_op_mode)
       && MEM_P (XEXP (op, 0))
       && CONST_INT_P (XEXP (op, 1))
-      && (INTVAL (XEXP (op, 1)) % GET_MODE_BITSIZE (mode)) == 0
+      && INTVAL (XEXP (op, 1)) % GET_MODE_BITSIZE (int_mode) == 0
       && INTVAL (XEXP (op, 1)) > 0
       && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (int_op_mode)
       && ! mode_dependent_address_p (XEXP (XEXP (op, 0), 0),
				     MEM_ADDR_SPACE (XEXP (op, 0)))
       && ! MEM_VOLATILE_P (XEXP (op, 0))
-      && (GET_MODE_SIZE (mode) >= UNITS_PER_WORD
+      && (GET_MODE_SIZE (int_mode) >= UNITS_PER_WORD
	  || WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN))
     {
-      int byte = subreg_lowpart_offset (mode, int_op_mode);
+      int byte = subreg_lowpart_offset (int_mode, int_op_mode);
       int shifted_bytes = INTVAL (XEXP (op, 1)) / BITS_PER_UNIT;
-      return adjust_address_nv (XEXP (op, 0), mode,
+      return adjust_address_nv (XEXP (op, 0), int_mode,
				(WORDS_BIG_ENDIAN
				 ? byte - shifted_bytes
				 : byte + shifted_bytes));
@@ -2989,19 +2990,21 @@ simplify_binary_operation_1 (enum rtx_code code, machine_mode mode,
	 is (lt foo (const_int 0)), so we can perform the above
	 simplification if STORE_FLAG_VALUE is 1.  */
 
-      if (STORE_FLAG_VALUE == 1
+      if (is_a <scalar_int_mode> (mode, &int_mode)
+	  && STORE_FLAG_VALUE == 1
	  && trueop1 == const1_rtx
	  && GET_CODE (op0) == LSHIFTRT
	  && CONST_INT_P (XEXP (op0, 1))
-	  && INTVAL (XEXP (op0, 1)) == GET_MODE_PRECISION (mode) - 1)
-	return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);
+	  && INTVAL (XEXP (op0, 1)) == GET_MODE_PRECISION (int_mode) - 1)
+	return gen_rtx_GE (int_mode, XEXP (op0, 0), const0_rtx);
 
       /* (xor (comparison foo bar) (const_int sign-bit))
	 when STORE_FLAG_VALUE is the sign bit.  */
-      if (val_signbit_p (mode, STORE_FLAG_VALUE)
+      if (is_a <scalar_int_mode> (mode, &int_mode)
+	  && val_signbit_p (int_mode, STORE_FLAG_VALUE)
	  && trueop1 == const_true_rtx
	  && COMPARISON_P (op0)
-	  && (reversed = reversed_comparison (op0, mode)))
+	  && (reversed = reversed_comparison (op0, int_mode)))
	return reversed;
 
       tem = simplify_byte_swapping_operation (code, mode, op0, op1);
@@ -3424,17 +3427,17 @@ simplify_binary_operation_1 (enum rtx_code code, machine_mode mode,
	return op0;
 
       /* Optimize (lshiftrt (clz X) C) as (eq X 0).  */
       if (GET_CODE (op0) == CLZ
+	  && is_a <scalar_int_mode> (GET_MODE (XEXP (op0, 0)), &inner_mode)
	  && CONST_INT_P (trueop1)
	  && STORE_FLAG_VALUE == 1
	  && INTVAL (trueop1) < (HOST_WIDE_INT)width)
	{
-	  machine_mode imode = GET_MODE (XEXP (op0, 0));
	  unsigned HOST_WIDE_INT zero_val = 0;
 
-	  if (CLZ_DEFINED_VALUE_AT_ZERO (imode, zero_val)
-	      && zero_val == GET_MODE_PRECISION (imode)
+	  if (CLZ_DEFINED_VALUE_AT_ZERO (inner_mode, zero_val)
+	      && zero_val == GET_MODE_PRECISION (inner_mode)
	      && INTVAL (trueop1) == exact_log2 (zero_val))
-	    return simplify_gen_relational (EQ, mode, imode,
+	    return simplify_gen_relational (EQ, mode, inner_mode,
					    XEXP (op0, 0), const0_rtx);
	}
       goto canonicalize_shift;
@@ -5275,7 +5278,9 @@ simplify_const_relational_operation (enum rtx_code code,
     }
 
   /* Optimize integer comparisons with zero.  */
-  if (trueop1 == const0_rtx && !side_effects_p (trueop0))
+  if (is_a <scalar_int_mode> (mode, &int_mode)
+      && trueop1 == const0_rtx
+      && !side_effects_p (trueop0))
     {
       /* Some addresses are known to be nonzero.  We don't know
	 their sign, but equality comparisons are known.  */
@@ -5294,7 +5299,7 @@ simplify_const_relational_operation (enum rtx_code code,
	  rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
	  if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
	    {
-	      int sign_bitnum = GET_MODE_PRECISION (mode) - 1;
+	      int sign_bitnum = GET_MODE_PRECISION (int_mode) - 1;
	      int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
			      && (UINTVAL (inner_const)
				  & (HOST_WIDE_INT_1U
@@ -5410,13 +5415,9 @@ simplify_ternary_operation (enum rtx_code code, machine_mode mode,
			    machine_mode op0_mode, rtx op0, rtx op1,
			    rtx op2)
 {
-  unsigned int width = GET_MODE_PRECISION (mode);
   bool any_change = false;
   rtx tem, trueop2;
-
-  /* VOIDmode means "infinite" precision.  */
-  if (width == 0)
-    width = HOST_BITS_PER_WIDE_INT;
+  scalar_int_mode int_mode, int_op0_mode;
 
   switch (code)
     {
@@ -5450,17 +5451,21 @@ simplify_ternary_operation (enum rtx_code code, machine_mode mode,
       if (CONST_INT_P (op0)
	  && CONST_INT_P (op1)
	  && CONST_INT_P (op2)
-	  && ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
-	  && width <= (unsigned) HOST_BITS_PER_WIDE_INT)
+	  && is_a <scalar_int_mode> (mode, &int_mode)
+	  && INTVAL (op1) + INTVAL (op2) <= GET_MODE_PRECISION (int_mode)
+	  && HWI_COMPUTABLE_MODE_P (int_mode))
	{
	  /* Extracting a bit-field from a constant */
	  unsigned HOST_WIDE_INT val = UINTVAL (op0);
	  HOST_WIDE_INT op1val = INTVAL (op1);
	  HOST_WIDE_INT op2val = INTVAL (op2);
-	  if (BITS_BIG_ENDIAN)
-	    val >>= GET_MODE_PRECISION (op0_mode) - op2val - op1val;
-	  else
+	  if (!BITS_BIG_ENDIAN)
	    val >>= op2val;
+	  else if (is_a <scalar_int_mode> (op0_mode, &int_op0_mode))
+	    val >>= GET_MODE_PRECISION (int_op0_mode) - op2val - op1val;
+	  else
+	    /* Not enough information to calculate the bit position.  */
+	    break;
 
	  if (HOST_BITS_PER_WIDE_INT != op1val)
	    {
@@ -5473,7 +5478,7 @@ simplify_ternary_operation (enum rtx_code code, machine_mode mode,
		val |= ~ ((HOST_WIDE_INT_1U << op1val) - 1);
	    }
 
-	  return gen_int_mode (val, mode);
+	  return gen_int_mode (val, int_mode);
	}
       break;

--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -4011,6 +4011,7 @@ force_expr_to_var_cost (tree expr, bool speed)
   tree op0, op1;
   comp_cost cost0, cost1, cost;
   machine_mode mode;
+  scalar_int_mode int_mode;
 
   if (!costs_initialized)
     {
@@ -4133,8 +4134,9 @@ force_expr_to_var_cost (tree expr, bool speed)
	    mult = op0;
 
	  if (mult != NULL_TREE
+	      && is_a <scalar_int_mode> (mode, &int_mode)
	      && cst_and_fits_in_hwi (TREE_OPERAND (mult, 1))
-	      && get_shiftadd_cost (expr, mode, cost0, cost1, mult,
+	      && get_shiftadd_cost (expr, int_mode, cost0, cost1, mult,
				    speed, &sa_cost))
	    return sa_cost;
	}