alias.c: Use REG_P...
2009-06-22  Shujing Zhao  <pearly.zhao@oracle.com>

	* alias.c: Use REG_P, MEM_P, CONST_INT_P, LABEL_P, CALL_P, NOTE_P and
	JUMP_TABLE_DATA_P predicates where applicable.
	* auto-inc-dec.c: Ditto.
	* builtins.c: Ditto.
	* caller-save.c: Ditto.
	* calls.c: Ditto.
	* cfgcleanup.c: Ditto.
	* cfglayout.c: Ditto.
	* cfgrtl.c: Ditto.
	* combine.c: Ditto.
	* combine-stack-adj.c: Ditto.
	* cse.c: Ditto.
	* cselib.c: Ditto.
	* dbxout.c: Ditto.
	* df-scan.c: Ditto.
	* dse.c: Ditto.
	* dwarf2asm.c: Ditto.
	* dwarf2out.c: Ditto.
	* emit-rtl.c: Ditto.
	* except.c: Ditto.
	* explow.c: Ditto.
	* expmed.c: Ditto.
	* expr.c: Ditto.
	* final.c: Ditto.
	* function.c: Ditto.
	* fwprop.c: Ditto.
	* gcse.c: Ditto.
	* genpreds.c: Ditto.
	* genrecog.c: Ditto.
	* ifcvt.c: Ditto.
	* ira-costs.c: Ditto.
	* ira-lives.c: Ditto.
	* jump.c: Ditto.
	* loop-iv.c: Ditto.
	* lower-subreg.c: Ditto.
	* modulo-sched.c: Ditto.
	* optabs.c: Ditto.
	* postreload.c: Ditto.
	* print-rtl.c: Ditto.
	* recog.c: Ditto.
	* reginfo.c: Ditto.
	* regmove.c: Ditto.
	* reload1.c: Ditto.
	* reload.c: Ditto.
	* reorg.c: Ditto.
	* rtlanal.c: Ditto.
	* rtl.c: Ditto.
	* sched-vis.c: Ditto.
	* sdbout.c: Ditto.
	* sel-sched-ir.c: Ditto.
	* simplify-rtx.c: Ditto.
	* targhooks.c: Ditto.
	* var-tracking.c: Ditto.
	* vmsdbgout.c: Ditto.

From-SVN: r148786
This commit is contained in: parent eee37e909a, commit 481683e1d5
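Each predicate named above is just a wrapper around a GET_CODE test on an rtx, so the substitutions in the diff below are mechanical and do not change behavior. A minimal sketch of the equivalences involved (illustrative, not the verbatim rtl.h definitions):

    /* rtx-code predicates: each expands to the GET_CODE test it replaces.  */
    #define REG_P(X)       (GET_CODE (X) == REG)
    #define MEM_P(X)       (GET_CODE (X) == MEM)
    #define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)
    #define LABEL_P(X)     (GET_CODE (X) == CODE_LABEL)
    #define CALL_P(X)      (GET_CODE (X) == CALL_INSN)
    #define NOTE_P(X)      (GET_CODE (X) == NOTE)
    /* JUMP_TABLE_DATA_P covers the compound test seen in the cfg* hunks:
       JUMP_P (INSN) && (GET_CODE (PATTERN (INSN)) == ADDR_VEC
                         || GET_CODE (PATTERN (INSN)) == ADDR_DIFF_VEC).  */

So, for example, GET_CODE (XEXP (x, 1)) == CONST_INT becomes CONST_INT_P (XEXP (x, 1)), and GET_CODE (insn) != CALL_INSN becomes !CALL_P (insn).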
gcc/ChangeLog
@@ -1,3 +1,60 @@
+2009-06-22  Shujing Zhao  <pearly.zhao@oracle.com>
+
+	* alias.c: Use REG_P, MEM_P, CONST_INT_P, LABEL_P, CALL_P, NOTE_P and
+	JUMP_TABLE_DATA_P predicates where applicable.
+	* auto-inc-dec.c: Ditto.
+	* builtins.c: Ditto.
+	* caller-save.c: Ditto.
+	* calls.c: Ditto.
+	* cfgcleanup.c: Ditto.
+	* cfglayout.c: Ditto.
+	* cfgrtl.c: Ditto.
+	* combine.c: Ditto.
+	* combine-stack-adj.c: Ditto.
+	* cse.c: Ditto.
+	* cselib.c: Ditto.
+	* dbxout.c: Ditto.
+	* df-scan.c: Ditto.
+	* dse.c: Ditto.
+	* dwarf2asm.c: Ditto.
+	* dwarf2out.c: Ditto.
+	* emit-rtl.c: Ditto.
+	* except.c: Ditto.
+	* explow.c: Ditto.
+	* expmed.c: Ditto.
+	* expr.c: Ditto.
+	* final.c: Ditto.
+	* function.c: Ditto.
+	* fwprop.c: Ditto.
+	* gcse.c: Ditto.
+	* genpreds.c: Ditto.
+	* genrecog.c: Ditto.
+	* ifcvt.c: Ditto.
+	* ira-costs.c: Ditto.
+	* ira-lives.c: Ditto.
+	* jump.c: Ditto.
+	* loop-iv.c: Ditto.
+	* lower-subreg.c: Ditto.
+	* modulo-sched.c: Ditto.
+	* optabs.c: Ditto.
+	* postreload.c: Ditto.
+	* print-rtl.c: Ditto.
+	* recog.c: Ditto.
+	* reginfo.c: Ditto.
+	* regmove.c: Ditto.
+	* reload1.c: Ditto.
+	* reload.c: Ditto.
+	* reorg.c: Ditto.
+	* rtlanal.c: Ditto.
+	* rtl.c: Ditto.
+	* sched-vis.c: Ditto.
+	* sdbout.c: Ditto.
+	* sel-sched-ir.c: Ditto.
+	* simplify-rtx.c: Ditto.
+	* targhooks.c: Ditto.
+	* var-tracking.c: Ditto.
+	* vmsdbgout.c: Ditto.
+
 2009-06-22  Matthias Klose  <doko@ubuntu.com>
 
 	* Makefile.in (install-plugin): Always use DESTDIR.
gcc/alias.c
@@ -1018,9 +1018,9 @@ find_base_value (rtx src)
 /* Guess which operand is the base address:
 If either operand is a symbol, then it is the base. If
 either operand is a CONST_INT, then the other is the base. */
-if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
+if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
 return find_base_value (src_0);
-else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
+else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
 return find_base_value (src_1);

 return 0;
@@ -1034,7 +1034,7 @@ find_base_value (rtx src)
 case AND:
 /* If the second operand is constant set the base
 address to the first operand. */
-if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
+if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
 return find_base_value (XEXP (src, 0));
 return 0;

@@ -1176,7 +1176,7 @@ record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
 break;
 }
 case AND:
-if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
+if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
 new_reg_base_value[regno] = 0;
 break;
 default:
@@ -1269,9 +1269,9 @@ canon_rtx (rtx x)

 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
 {
-if (GET_CODE (x0) == CONST_INT)
+if (CONST_INT_P (x0))
 return plus_constant (x1, INTVAL (x0));
-else if (GET_CODE (x1) == CONST_INT)
+else if (CONST_INT_P (x1))
 return plus_constant (x0, INTVAL (x1));
 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
 }
@@ -1553,7 +1553,7 @@ find_base_term (rtx x)
 }

 case AND:
-if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
+if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
 return find_base_term (XEXP (x, 0));
 return 0;

@@ -1616,11 +1616,11 @@ base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
 return 1;
 if (GET_CODE (x) == AND
-&& (GET_CODE (XEXP (x, 1)) != CONST_INT
+&& (!CONST_INT_P (XEXP (x, 1))
 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
 return 1;
 if (GET_CODE (y) == AND
-&& (GET_CODE (XEXP (y, 1)) != CONST_INT
+&& (!CONST_INT_P (XEXP (y, 1))
 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
 return 1;

@@ -1781,21 +1781,21 @@ memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
 if (rtx_equal_for_memref_p (x0, y0))
 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
-if (GET_CODE (x1) == CONST_INT)
+if (CONST_INT_P (x1))
 {
-if (GET_CODE (y1) == CONST_INT)
+if (CONST_INT_P (y1))
 return memrefs_conflict_p (xsize, x0, ysize, y0,
 c - INTVAL (x1) + INTVAL (y1));
 else
 return memrefs_conflict_p (xsize, x0, ysize, y,
 c - INTVAL (x1));
 }
-else if (GET_CODE (y1) == CONST_INT)
+else if (CONST_INT_P (y1))
 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

 return 1;
 }
-else if (GET_CODE (x1) == CONST_INT)
+else if (CONST_INT_P (x1))
 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
 }
 else if (GET_CODE (y) == PLUS)
@@ -1805,7 +1805,7 @@ memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
 rtx y0 = XEXP (y, 0);
 rtx y1 = XEXP (y, 1);

-if (GET_CODE (y1) == CONST_INT)
+if (CONST_INT_P (y1))
 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
 else
 return 1;
@@ -1831,7 +1831,7 @@ memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

 /* Can't properly adjust our sizes. */
-if (GET_CODE (x1) != CONST_INT)
+if (!CONST_INT_P (x1))
 return 1;
 xsize /= INTVAL (x1);
 ysize /= INTVAL (x1);
@@ -1847,13 +1847,13 @@ memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
 as an access with indeterminate size. Assume that references
 besides AND are aligned, so if the size of the other reference is
 at least as large as the alignment, assume no other overlap. */
-if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
+if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
 {
 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
 xsize = -1;
 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
 }
-if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
+if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
 {
 /* ??? If we are indexing far enough into the array/structure, we
 may yet be able to determine that we can not overlap. But we
@@ -1866,7 +1866,7 @@ memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)

 if (CONSTANT_P (x))
 {
-if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
+if (CONST_INT_P (x) && CONST_INT_P (y))
 {
 c += (INTVAL (y) - INTVAL (x));
 return (xsize <= 0 || ysize <= 0
@@ -2169,11 +2169,11 @@ nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
 we can avoid overlap is if we can deduce that they are nonoverlapping
 pieces of that decl, which is very rare. */
 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
-if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
+if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
-if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
+if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

 /* If the bases are different, we know they do not overlap if both
@@ -2627,7 +2627,7 @@ init_alias_analysis (void)
 && GET_CODE (src) == PLUS
 && REG_P (XEXP (src, 0))
 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
-&& GET_CODE (XEXP (src, 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (src, 1)))
 {
 t = plus_constant (t, INTVAL (XEXP (src, 1)));
 set_reg_known_value (regno, t);
gcc/auto-inc-dec.c
@@ -813,7 +813,7 @@ parse_add_or_inc (rtx insn, bool before_mem)
 else
 inc_insn.form = before_mem ? FORM_PRE_ADD : FORM_POST_ADD;

-if (GET_CODE (XEXP (SET_SRC (pat), 1)) == CONST_INT)
+if (CONST_INT_P (XEXP (SET_SRC (pat), 1)))
 {
 /* Process a = b + c where c is a const. */
 inc_insn.reg1_is_const = true;
@@ -891,7 +891,7 @@ find_address (rtx *address_of_x)
 mem_insn.reg0 = inc_insn.reg_res;
 mem_insn.reg1 = b;
 mem_insn.reg1_is_const = inc_insn.reg1_is_const;
-if (GET_CODE (b) == CONST_INT)
+if (CONST_INT_P (b))
 {
 /* Match with *(reg0 + reg1) where reg1 is a const. */
 HOST_WIDE_INT val = INTVAL (b);
@@ -1279,7 +1279,7 @@ find_mem (rtx *address_of_x)
 mem_insn.mem_loc = address_of_x;
 mem_insn.reg0 = XEXP (XEXP (x, 0), 0);
 mem_insn.reg1 = reg1;
-if (GET_CODE (reg1) == CONST_INT)
+if (CONST_INT_P (reg1))
 {
 mem_insn.reg1_is_const = true;
 /* Match with *(reg0 + c) where c is a const. */
gcc/builtins.c
@@ -1169,7 +1169,7 @@ get_memory_rtx (tree exp, tree len)
 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

 if (MEM_OFFSET (mem)
-&& GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
+&& CONST_INT_P (MEM_OFFSET (mem)))
 offset = INTVAL (MEM_OFFSET (mem));

 if (offset >= 0 && len && host_integerp (len, 0))
@@ -1532,7 +1532,7 @@ expand_builtin_apply (rtx function, rtx arguments, rtx argsize)

 dest = virtual_outgoing_args_rtx;
 #ifndef STACK_GROWS_DOWNWARD
-if (GET_CODE (argsize) == CONST_INT)
+if (CONST_INT_P (argsize))
 dest = plus_constant (dest, -INTVAL (argsize));
 else
 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
@@ -3403,7 +3403,7 @@ expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
 by pieces, we can avoid loading the string from memory
 and only stored the computed constants. */
 if (src_str
-&& GET_CODE (len_rtx) == CONST_INT
+&& CONST_INT_P (len_rtx)
 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
 CONST_CAST (char *, src_str),
@@ -3521,7 +3521,7 @@ expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
 by pieces, we can avoid loading the string from memory
 and only stored the computed constants. */
 if (src_str
-&& GET_CODE (len_rtx) == CONST_INT
+&& CONST_INT_P (len_rtx)
 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
 CONST_CAST (char *, src_str),
@@ -3538,7 +3538,7 @@ expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
 return dest_mem;
 }

-if (GET_CODE (len_rtx) == CONST_INT
+if (CONST_INT_P (len_rtx)
 && can_move_by_pieces (INTVAL (len_rtx),
 MIN (dest_align, src_align)))
 {
@@ -3786,7 +3786,7 @@ expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
 {
 rtx len_rtx = expand_normal (len);

-if (GET_CODE (len_rtx) == CONST_INT)
+if (CONST_INT_P (len_rtx))
 {
 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
 dst, src, target, mode);
@@ -4194,7 +4194,7 @@ expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
 arg3_rtx = expand_normal (fold_convert (sizetype, len));

 /* Set MEM_SIZE as appropriate. */
-if (GET_CODE (arg3_rtx) == CONST_INT)
+if (CONST_INT_P (arg3_rtx))
 {
 set_mem_size (arg1_rtx, arg3_rtx);
 set_mem_size (arg2_rtx, arg3_rtx);
gcc/caller-save.c
@@ -471,7 +471,7 @@ setup_save_areas (void)
 {
 insn = chain->insn;
 next = chain->next;
-if (GET_CODE (insn) != CALL_INSN
+if (!CALL_P (insn)
 || find_reg_note (insn, REG_NORETURN, NULL))
 continue;
 freq = REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn));
@@ -531,7 +531,7 @@ setup_save_areas (void)
 call_saved_regs_num = 0;
 insn = chain->insn;
 next = chain->next;
-if (GET_CODE (insn) != CALL_INSN
+if (!CALL_P (insn)
 || find_reg_note (insn, REG_NORETURN, NULL))
 continue;
 REG_SET_TO_HARD_REG_SET (hard_regs_to_save,
gcc/calls.c
@@ -1431,7 +1431,7 @@ compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals
 && args[i].partial == 0)
 continue;

-if (GET_CODE (offset) == CONST_INT)
+if (CONST_INT_P (offset))
 addr = plus_constant (arg_reg, INTVAL (offset));
 else
 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
@@ -1458,14 +1458,14 @@ compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals
 boundary = args[i].locate.boundary;
 if (args[i].locate.where_pad != downward)
 align = boundary;
-else if (GET_CODE (offset) == CONST_INT)
+else if (CONST_INT_P (offset))
 {
 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
 align = align & -align;
 }
 set_mem_align (args[i].stack, align);

-if (GET_CODE (slot_offset) == CONST_INT)
+if (CONST_INT_P (slot_offset))
 addr = plus_constant (arg_reg, INTVAL (slot_offset));
 else
 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
@@ -1549,7 +1549,7 @@ mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
 i = 0;
 else if (GET_CODE (addr) == PLUS
 && XEXP (addr, 0) == crtl->args.internal_arg_pointer
-&& GET_CODE (XEXP (addr, 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (addr, 1)))
 i = INTVAL (XEXP (addr, 1));
 /* Return true for arg pointer based indexed addressing. */
 else if (GET_CODE (addr) == PLUS
@@ -4264,7 +4264,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags,
 || (GET_CODE (XEXP (x, 0)) == PLUS
 && XEXP (XEXP (x, 0), 0) ==
 crtl->args.internal_arg_pointer
-&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
+&& CONST_INT_P (XEXP (XEXP (x, 0), 1))))
 {
 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
 i = INTVAL (XEXP (XEXP (x, 0), 1));
@@ -4272,7 +4272,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags,
 /* expand_call should ensure this. */
 gcc_assert (!arg->locate.offset.var
 && arg->locate.size.var == 0
-&& GET_CODE (size_rtx) == CONST_INT);
+&& CONST_INT_P (size_rtx));

 if (arg->locate.offset.constant > i)
 {
gcc/cfgcleanup.c
@@ -2064,9 +2064,7 @@ delete_dead_jumptables (void)
 next = NEXT_INSN (insn);
 if (LABEL_P (insn)
 && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
-&& JUMP_P (next)
-&& (GET_CODE (PATTERN (next)) == ADDR_VEC
-|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
+&& JUMP_TABLE_DATA_P (next))
 {
 rtx label = insn, jump = next;

gcc/cfglayout.c
@@ -112,9 +112,7 @@ skip_insns_after_block (basic_block bb)

 case CODE_LABEL:
 if (NEXT_INSN (insn)
-&& JUMP_P (NEXT_INSN (insn))
-&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
-|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
+&& JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
 {
 insn = NEXT_INSN (insn);
 last_insn = insn;
@@ -585,9 +583,7 @@ reemit_insn_block_notes (void)
 tree this_block;

 /* Avoid putting scope notes between jump table and its label. */
-if (JUMP_P (insn)
-&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
-|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+if (JUMP_TABLE_DATA_P (insn))
 continue;

 this_block = insn_scope (insn);
gcc/cfgrtl.c
@@ -170,9 +170,7 @@ delete_insn (rtx insn)
 remove_note (insn, note);
 }

-if (JUMP_P (insn)
-&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
-|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+if (JUMP_TABLE_DATA_P (insn))
 {
 rtx pat = PATTERN (insn);
 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
@@ -2124,9 +2122,7 @@ rtl_verify_flow_info (void)
 case CODE_LABEL:
 /* An addr_vec is placed outside any basic block. */
 if (NEXT_INSN (x)
-&& JUMP_P (NEXT_INSN (x))
-&& (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
-|| GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
+&& JUMP_TABLE_DATA_P (NEXT_INSN (x)))
 x = NEXT_INSN (x);

 /* But in any case, non-deletable labels can appear anywhere. */
gcc/combine-stack-adj.c
@@ -116,7 +116,7 @@ stack_memref_p (rtx x)
 return 1;
 if (GET_CODE (x) == PLUS
 && XEXP (x, 0) == stack_pointer_rtx
-&& GET_CODE (XEXP (x, 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (x, 1)))
 return 1;

 return 0;
@@ -298,7 +298,7 @@ adjust_frame_related_expr (rtx last_sp_set, rtx insn,
 && SET_DEST (last) == stack_pointer_rtx
 && GET_CODE (SET_SRC (last)) == PLUS
 && XEXP (SET_SRC (last), 0) == stack_pointer_rtx
-&& GET_CODE (XEXP (SET_SRC (last), 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (SET_SRC (last), 1)))
 {
 XEXP (SET_SRC (last), 1)
 = GEN_INT (INTVAL (XEXP (SET_SRC (last), 1)) + this_adjust);
@@ -366,7 +366,7 @@ combine_stack_adjustments_for_block (basic_block bb)
 if (dest == stack_pointer_rtx
 && GET_CODE (src) == PLUS
 && XEXP (src, 0) == stack_pointer_rtx
-&& GET_CODE (XEXP (src, 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (src, 1)))
 {
 HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

gcc/combine.c
@@ -673,7 +673,7 @@ do_SUBST (rtx *into, rtx newval)
 little gain doing the checks here. Focus on catching invalid
 transformations involving integer constants. */
 if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
-&& GET_CODE (newval) == CONST_INT)
+&& CONST_INT_P (newval))
 {
 /* Sanity check that we're replacing oldval with a CONST_INT
 that is a valid sign-extension for the original mode. */
@@ -687,9 +687,9 @@ do_SUBST (rtx *into, rtx newval)
 perform this test on oldval instead, checking whether an
 invalid replacement took place before we got here. */
 gcc_assert (!(GET_CODE (oldval) == SUBREG
-&& GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
+&& CONST_INT_P (SUBREG_REG (oldval))));
 gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
-&& GET_CODE (XEXP (oldval, 0)) == CONST_INT));
+&& CONST_INT_P (XEXP (oldval, 0))));
 }

 if (undobuf.frees)
@@ -1490,7 +1490,7 @@ set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
 instead of this kludge. */

 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
-&& GET_CODE (src) == CONST_INT
+&& CONST_INT_P (src)
 && INTVAL (src) > 0
 && 0 != (INTVAL (src)
 & ((HOST_WIDE_INT) 1
@@ -1962,7 +1962,7 @@ contains_muldiv (rtx x)
 return 1;

 case MULT:
-return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
+return ! (CONST_INT_P (XEXP (x, 1))
 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
 default:
 if (BINARY_P (x))
@@ -2346,10 +2346,10 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
 constant. */
 if (i1 == 0
 && (temp = single_set (i2)) != 0
-&& (GET_CODE (SET_SRC (temp)) == CONST_INT
+&& (CONST_INT_P (SET_SRC (temp))
 || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
 && GET_CODE (PATTERN (i3)) == SET
-&& (GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT
+&& (CONST_INT_P (SET_SRC (PATTERN (i3)))
 || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
 && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
 {
@@ -2359,8 +2359,8 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)

 if (GET_CODE (dest) == ZERO_EXTRACT)
 {
-if (GET_CODE (XEXP (dest, 1)) == CONST_INT
-&& GET_CODE (XEXP (dest, 2)) == CONST_INT)
+if (CONST_INT_P (XEXP (dest, 1))
+&& CONST_INT_P (XEXP (dest, 2)))
 {
 width = INTVAL (XEXP (dest, 1));
 offset = INTVAL (XEXP (dest, 2));
@@ -2400,7 +2400,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
 rtx inner = SET_SRC (PATTERN (i3));
 rtx outer = SET_SRC (temp);

-if (GET_CODE (outer) == CONST_INT)
+if (CONST_INT_P (outer))
 {
 olo = INTVAL (outer);
 ohi = olo < 0 ? -1 : 0;
@@ -2411,7 +2411,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
 ohi = CONST_DOUBLE_HIGH (outer);
 }

-if (GET_CODE (inner) == CONST_INT)
+if (CONST_INT_P (inner))
 {
 ilo = INTVAL (inner);
 ihi = ilo < 0 ? -1 : 0;
@@ -3109,7 +3109,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
 an ASHIFT. This can occur if it was inside a PLUS and hence
 appeared to be a memory address. This is a kludge. */
 if (split_code == MULT
-&& GET_CODE (XEXP (*split, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (*split, 1))
 && INTVAL (XEXP (*split, 1)) > 0
 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
 {
@@ -3917,7 +3917,7 @@ find_split_point (rtx *loc, rtx insn)
 the first pseudo-reg (one of the virtual regs) as a placeholder;
 it will not remain in the result. */
 if (GET_CODE (XEXP (x, 0)) == PLUS
-&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (x, 0), 1))
 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
 {
 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
@@ -4019,9 +4019,9 @@ find_split_point (rtx *loc, rtx insn)
 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
 <= HOST_BITS_PER_WIDE_INT)
-&& GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
-&& GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
-&& GET_CODE (SET_SRC (x)) == CONST_INT
+&& CONST_INT_P (XEXP (SET_DEST (x), 1))
+&& CONST_INT_P (XEXP (SET_DEST (x), 2))
+&& CONST_INT_P (SET_SRC (x))
 && ((INTVAL (XEXP (SET_DEST (x), 1))
 + INTVAL (XEXP (SET_DEST (x), 2)))
 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
@@ -4075,7 +4075,7 @@ find_split_point (rtx *loc, rtx insn)
 this is no worse, but if it took more than one insn, it will
 be better. */

-if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
+if (CONST_INT_P (XEXP (SET_SRC (x), 1))
 && REG_P (XEXP (SET_SRC (x), 0))
 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
 && REG_P (SET_DEST (x))
@@ -4135,8 +4135,8 @@ find_split_point (rtx *loc, rtx insn)

 case SIGN_EXTRACT:
 case ZERO_EXTRACT:
-if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
-&& GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
+if (CONST_INT_P (XEXP (SET_SRC (x), 1))
+&& CONST_INT_P (XEXP (SET_SRC (x), 2)))
 {
 inner = XEXP (SET_SRC (x), 0);
 len = INTVAL (XEXP (SET_SRC (x), 1));
@@ -4512,7 +4512,7 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
 return new_rtx;

 if (GET_CODE (x) == SUBREG
-&& (GET_CODE (new_rtx) == CONST_INT
+&& (CONST_INT_P (new_rtx)
 || GET_CODE (new_rtx) == CONST_DOUBLE))
 {
 enum machine_mode mode = GET_MODE (x);
@@ -4523,7 +4523,7 @@ subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
 if (! x)
 x = gen_rtx_CLOBBER (mode, const0_rtx);
 }
-else if (GET_CODE (new_rtx) == CONST_INT
+else if (CONST_INT_P (new_rtx)
 && GET_CODE (x) == ZERO_EXTEND)
 {
 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
@@ -4662,7 +4662,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)

 /* Likewise, we can make the negate of a comparison operation
 if the result values are - STORE_FLAG_VALUE and zero. */
-else if (GET_CODE (true_rtx) == CONST_INT
+else if (CONST_INT_P (true_rtx)
 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
 && false_rtx == const0_rtx)
 x = simplify_gen_unary (NEG, mode,
@@ -4670,7 +4670,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
 mode, VOIDmode,
 cond, cop1),
 mode);
-else if (GET_CODE (false_rtx) == CONST_INT
+else if (CONST_INT_P (false_rtx)
 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
 && true_rtx == const0_rtx
 && ((reversed = reversed_comparison_code_parts
@@ -4849,7 +4849,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */

 if (GET_CODE (temp) == ASHIFTRT
-&& GET_CODE (XEXP (temp, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (temp, 1))
 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
 INTVAL (XEXP (temp, 1)));
@@ -4932,14 +4932,14 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
 sign_extract. The `and' may be a zero_extend and the two
 <c>, -<c> constants may be reversed. */
 if (GET_CODE (XEXP (x, 0)) == XOR
-&& GET_CODE (XEXP (x, 1)) == CONST_INT
-&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (x, 1))
+&& CONST_INT_P (XEXP (XEXP (x, 0), 1))
 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
-&& GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
@@ -4992,7 +4992,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
 (and <foo> (const_int pow2-1)) */
 if (GET_CODE (XEXP (x, 1)) == AND
-&& GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (x, 1), 1))
 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
@@ -5026,7 +5026,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
 case UDIV:
 /* If this is a divide by a power of two, treat it as a shift if
 its first operand is a shift. */
-if (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 1))
 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
 && (GET_CODE (XEXP (x, 0)) == ASHIFT
 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
@@ -5220,7 +5220,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
 case ROTATE:
 case ROTATERT:
 /* If this is a shift by a constant amount, simplify it. */
-if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+if (CONST_INT_P (XEXP (x, 1)))
 return simplify_shift_const (x, code, mode, XEXP (x, 0),
 INTVAL (XEXP (x, 1)));

@@ -5339,7 +5339,7 @@ simplify_if_then_else (rtx x)
 && reversed_comparison_code (cond, NULL) != UNKNOWN
 && (true_rtx == pc_rtx
 || (CONSTANT_P (true_rtx)
-&& GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
+&& !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
 || true_rtx == const0_rtx
 || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
 || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
@@ -5556,7 +5556,7 @@ simplify_if_then_else (rtx x)
 can actually do this more generally, but it doesn't seem worth it. */

 if (true_code == NE && XEXP (cond, 1) == const0_rtx
-&& false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
+&& false_rtx == const0_rtx && CONST_INT_P (true_rtx)
 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
@@ -5568,7 +5568,7 @@ simplify_if_then_else (rtx x)

 /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */
 if (true_code == NE && XEXP (cond, 1) == const0_rtx
-&& false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
+&& false_rtx == const0_rtx && CONST_INT_P (true_rtx)
 && GET_MODE (XEXP (cond, 0)) == mode
 && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
 == nonzero_bits (XEXP (cond, 0), mode)
@@ -5935,7 +5935,7 @@ simplify_logical (rtx x)
 /* We can call simplify_and_const_int only if we don't lose
 any (sign) bits when converting INTVAL (op1) to
 "unsigned HOST_WIDE_INT". */
-if (GET_CODE (op1) == CONST_INT
+if (CONST_INT_P (op1)
 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
 || INTVAL (op1) > 0))
 {
@@ -6024,7 +6024,7 @@ expand_compound_operation (rtx x)
 Even for a mode that is no wider than a const_int,
 we can't win, because we need to sign extend one of its bits through
 the rest of it, and we don't know which bit. */
-if (GET_CODE (XEXP (x, 0)) == CONST_INT)
+if (CONST_INT_P (XEXP (x, 0)))
 return x;

 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
@@ -6064,8 +6064,8 @@ expand_compound_operation (rtx x)
 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
 return XEXP (x, 0);

-if (GET_CODE (XEXP (x, 1)) != CONST_INT
-|| GET_CODE (XEXP (x, 2)) != CONST_INT
+if (!CONST_INT_P (XEXP (x, 1))
+|| !CONST_INT_P (XEXP (x, 2))
 || GET_MODE (XEXP (x, 0)) == VOIDmode)
 return x;

@@ -6236,24 +6236,24 @@ expand_field_assignment (const_rtx x)
 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
 }
 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
-&& GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (SET_DEST (x), 1)))
 {
 inner = XEXP (SET_DEST (x), 0);
 len = INTVAL (XEXP (SET_DEST (x), 1));
 pos = XEXP (SET_DEST (x), 2);

 /* A constant position should stay within the width of INNER. */
-if (GET_CODE (pos) == CONST_INT
+if (CONST_INT_P (pos)
 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
 break;

 if (BITS_BIG_ENDIAN)
 {
-if (GET_CODE (pos) == CONST_INT)
+if (CONST_INT_P (pos))
 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
 - INTVAL (pos));
 else if (GET_CODE (pos) == MINUS
-&& GET_CODE (XEXP (pos, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (pos, 1))
 && (INTVAL (XEXP (pos, 1))
 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
 /* If position is ADJUST - X, new position is X. */
@@ -6390,7 +6390,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
 inner = SUBREG_REG (inner);
 }
 else if (GET_CODE (inner) == ASHIFT
-&& GET_CODE (XEXP (inner, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (inner, 1))
 && pos_rtx == 0 && pos == 0
 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
 {
@@ -6407,7 +6407,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,

 inner_mode = GET_MODE (inner);

-if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
+if (pos_rtx && CONST_INT_P (pos_rtx))
 pos = INTVAL (pos_rtx), pos_rtx = 0;

 /* See if this can be done without an extraction. We never can if the
@@ -6515,7 +6515,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
 if (mode == tmode)
 return new_rtx;

-if (GET_CODE (new_rtx) == CONST_INT)
+if (CONST_INT_P (new_rtx))
 return gen_int_mode (INTVAL (new_rtx), mode);

 /* If we know that no extraneous bits are set, and that the high
@@ -6773,7 +6773,7 @@ extract_left_shift (rtx x, int count)
 /* This is the shift itself. If it is wide enough, we will return
 either the value being shifted if the shift count is equal to
 COUNT or a shift for the difference. */
-if (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 1))
 && INTVAL (XEXP (x, 1)) >= count)
 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
 INTVAL (XEXP (x, 1)) - count);
@@ -6788,7 +6788,7 @@ extract_left_shift (rtx x, int count)
 case PLUS: case IOR: case XOR: case AND:
 /* If we can safely shift this constant and we find the inner shift,
 make a new operation. */
-if (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 1))
 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
 return simplify_gen_binary (code, mode, tem,
@@ -6851,7 +6851,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
 case ASHIFT:
 /* Convert shifts by constants into multiplications if inside
 an address. */
-if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
+if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
 && INTVAL (XEXP (x, 1)) >= 0)
 {
@@ -6865,7 +6865,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
 case AND:
 /* If the second operand is not a constant, we can't do anything
 with it. */
-if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+if (!CONST_INT_P (XEXP (x, 1)))
 break;

 /* If the constant is a power of two minus one and the first operand
@@ -6910,7 +6910,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
 of bits in M, this is an extraction. */

 else if (GET_CODE (XEXP (x, 0)) == ROTATE
-&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (x, 0), 1))
 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
 {
@@ -6927,7 +6927,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
 && !have_insn_for (LSHIFTRT, mode)
 && have_insn_for (ASHIFTRT, mode)
-&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (x, 0), 1))
 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
 && mode_width <= HOST_BITS_PER_WIDE_INT)
@@ -6987,9 +6987,9 @@ make_compound_operation (rtx x, enum rtx_code in_code)

 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
 this is a SIGN_EXTRACT. */
-if (GET_CODE (rhs) == CONST_INT
+if (CONST_INT_P (rhs)
 && GET_CODE (lhs) == ASHIFT
-&& GET_CODE (XEXP (lhs, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (lhs, 1))
 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
 && INTVAL (rhs) < mode_width)
 {
@@ -7009,7 +7009,7 @@ make_compound_operation (rtx x, enum rtx_code in_code)
 if (!OBJECT_P (lhs)
 && ! (GET_CODE (lhs) == SUBREG
 && (OBJECT_P (SUBREG_REG (lhs))))
-&& GET_CODE (rhs) == CONST_INT
+&& CONST_INT_P (rhs)
 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
 && INTVAL (rhs) < mode_width
 && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
@@ -7300,7 +7300,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,

 /* If X is a CONST_INT, return a new one. Do this here since the
 test below will fail. */
-if (GET_CODE (x) == CONST_INT)
+if (CONST_INT_P (x))
 {
 if (SCALAR_INT_MODE_P (mode))
 return gen_int_mode (INTVAL (x) & mask, mode);
@@ -7355,7 +7355,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 whose constant is the AND of that constant with MASK. If it
 remains an AND of MASK, delete it since it is redundant. */

-if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+if (CONST_INT_P (XEXP (x, 1)))
 {
 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
 mask & INTVAL (XEXP (x, 1)));
@@ -7364,7 +7364,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 is just some low-order bits. If so, and it is MASK, we don't
 need it. */

-if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
+if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
 == mask))
 x = XEXP (x, 0);
@@ -7374,7 +7374,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 constant in the AND is wide enough, this might make a
 cheaper constant. */

-if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
+if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
 && GET_MODE_MASK (GET_MODE (x)) != mask
 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
 {
@@ -7418,7 +7418,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
 smask |= (HOST_WIDE_INT) -1 << width;

-if (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 1))
 && exact_log2 (- smask) >= 0
 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
@@ -7439,7 +7439,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 case MINUS:
 /* If X is (minus C Y) where C's least set bit is larger than any bit
 in the mask, then we may replace with (neg Y). */
-if (GET_CODE (XEXP (x, 0)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 0))
 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
 & -INTVAL (XEXP (x, 0))))
 > mask))
@@ -7451,7 +7451,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,

 /* Similarly, if C contains every bit in the fuller_mask, then we may
 replace with (not Y). */
-if (GET_CODE (XEXP (x, 0)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 0))
 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
 == INTVAL (XEXP (x, 0))))
 {
@@ -7471,10 +7471,10 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 constant we form is not wider than the mode of X. */

 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
-&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (x, 0), 1))
 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
-&& GET_CODE (XEXP (x, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (x, 1))
 && ((INTVAL (XEXP (XEXP (x, 0), 1))
 + floor_log2 (INTVAL (XEXP (x, 1))))
 < GET_MODE_BITSIZE (GET_MODE (x)))
@@ -7512,7 +7512,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 because such a count will have a different meaning in a
 wider mode. */

-if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (! (CONST_INT_P (XEXP (x, 1))
 && INTVAL (XEXP (x, 1)) >= 0
 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
@@ -7523,7 +7523,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 /* If the shift count is a constant and we can do arithmetic in
 the mode of the shift, refine which bits we need. Otherwise, use the
 conservative form of the mask. */
-if (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 1))
 && INTVAL (XEXP (x, 1)) >= 0
 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
@@ -7544,7 +7544,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 this shift constant is valid for the host, and we can do arithmetic
 in OP_MODE. */

-if (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 1))
 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
 {
@@ -7571,7 +7571,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 than a power of two), we can do this with just a shift. */

 if (GET_CODE (x) == LSHIFTRT
-&& GET_CODE (XEXP (x, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (x, 1))
 /* The shift puts one of the sign bit copies in the least significant
 bit. */
 && ((INTVAL (XEXP (x, 1))
@@ -7606,7 +7606,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 those bits, we are requesting a copy of the sign bit and hence can
 shift the sign bit to the appropriate location. */

-if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
+if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
 {
 int i;
@@ -7668,7 +7668,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 something that is still a shift. */

 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
-&& GET_CODE (XEXP (x, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (x, 1))
 && INTVAL (XEXP (x, 1)) >= 0
 && (INTVAL (XEXP (x, 1))
 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
@@ -7685,13 +7685,13 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 in the mode of X, compute where the bits we care about are.
 Otherwise, we can't do anything. Don't change the mode of
 the shift or propagate MODE into the shift, though. */
-if (GET_CODE (XEXP (x, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (x, 1))
 && INTVAL (XEXP (x, 1)) >= 0)
 {
 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
 GET_MODE (x), GEN_INT (mask),
 XEXP (x, 1));
-if (temp && GET_CODE (temp) == CONST_INT)
+if (temp && CONST_INT_P (temp))
 SUBST (XEXP (x, 0),
 force_to_mode (XEXP (x, 0), GET_MODE (x),
 INTVAL (temp), next_select));
@@ -7716,7 +7716,7 @@ force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
 wider than the mode of X. */

 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
-&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (x, 0), 1))
 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
 < GET_MODE_BITSIZE (GET_MODE (x)))
@@ -8205,7 +8205,7 @@ make_field_assignment (rtx x)
 for a SUBREG. */

 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
-&& GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (src, 0), 0))
 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
 {
@@ -8221,7 +8221,7 @@ make_field_assignment (rtx x)
 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
-&& GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
+&& CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
 {
@@ -8250,9 +8250,9 @@ make_field_assignment (rtx x)
 SRC is an AND with all bits of that field set, then we can discard
 the AND. */
 if (GET_CODE (dest) == ZERO_EXTRACT
-&& GET_CODE (XEXP (dest, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (dest, 1))
 && GET_CODE (src) == AND
-&& GET_CODE (XEXP (src, 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (src, 1)))
 {
 HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
 unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
@@ -8292,11 +8292,11 @@ make_field_assignment (rtx x)
 lhs = expand_compound_operation (XEXP (src, 1));

 if (GET_CODE (rhs) == AND
-&& GET_CODE (XEXP (rhs, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (rhs, 1))
 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
 else if (GET_CODE (lhs) == AND
-&& GET_CODE (XEXP (lhs, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (lhs, 1))
 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
 else
@@ -8333,10 +8333,10 @@ make_field_assignment (rtx x)
 /* If SRC is masked by an AND that does not make a difference in
 the value being stored, strip it. */
 if (GET_CODE (assign) == ZERO_EXTRACT
-&& GET_CODE (XEXP (assign, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (assign, 1))
 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
 && GET_CODE (src) == AND
-&& GET_CODE (XEXP (src, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (src, 1))
 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
 src = XEXP (src, 0);
@@ -8576,7 +8576,7 @@ simplify_and_const_int_1 (enum machine_mode mode, rtx varop,

 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
 to VAROP and return the new constant. */
-if (GET_CODE (varop) == CONST_INT)
+if (CONST_INT_P (varop))
 return gen_int_mode (INTVAL (varop) & constop, mode);

 /* See what bits may be nonzero in VAROP. Unlike the general case of
@@ -8732,7 +8732,7 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
 instead of this kludge. */

 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
-&& GET_CODE (tem) == CONST_INT
+&& CONST_INT_P (tem)
 && INTVAL (tem) > 0
 && 0 != (INTVAL (tem)
 & ((HOST_WIDE_INT) 1
@@ -9151,7 +9151,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 /* Some machines use MULT instead of ASHIFT because MULT
 is cheaper. But it is still better on those machines to
 merge two shifts into one. */
-if (GET_CODE (XEXP (varop, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (varop, 1))
 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
 {
 varop
@@ -9165,7 +9165,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,

 case UDIV:
 /* Similar, for when divides are cheaper. */
-if (GET_CODE (XEXP (varop, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (varop, 1))
 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
 {
 varop
@@ -9199,7 +9199,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 case ROTATE:
 /* Here we have two nested shifts. The result is usually the
 AND of a new shift with a mask. We compute the result below. */
-if (GET_CODE (XEXP (varop, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (varop, 1))
 && INTVAL (XEXP (varop, 1)) >= 0
 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
@@ -9307,7 +9307,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,

 /* Give up if we can't compute an outer operation to use. */
 if (mask_rtx == 0
-|| GET_CODE (mask_rtx) != CONST_INT
+|| !CONST_INT_P (mask_rtx)
 || ! merge_outer_ops (&outer_op, &outer_const, AND,
 INTVAL (mask_rtx),
 result_mode, &complement_p))
@@ -9341,8 +9341,8 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 B is not a constant. */

 else if (GET_CODE (varop) == code
-&& GET_CODE (XEXP (varop, 0)) == CONST_INT
-&& GET_CODE (XEXP (varop, 1)) != CONST_INT)
+&& CONST_INT_P (XEXP (varop, 0))
+&& !CONST_INT_P (XEXP (varop, 1)))
 {
 rtx new_rtx = simplify_const_binary_operation (code, mode,
 XEXP (varop, 0),
@@ -9395,7 +9395,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 what a ZERO_EXTRACT looks like. Also, some machines have
 (and (shift)) insns. */

-if (GET_CODE (XEXP (varop, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (varop, 1))
 /* We can't do this if we have (ashiftrt (xor)) and the
 constant has its sign bit set in shift_mode. */
 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
@@ -9404,7 +9404,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 && (new_rtx = simplify_const_binary_operation (code, result_mode,
 XEXP (varop, 1),
 GEN_INT (count))) != 0
-&& GET_CODE (new_rtx) == CONST_INT
+&& CONST_INT_P (new_rtx)
 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
 INTVAL (new_rtx), result_mode, &complement_p))
 {
@@ -9416,7 +9416,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 logical expression, make a new logical expression, and apply
 the inverse distributive law. This also can't be done
 for some (ashiftrt (xor)). */
-if (GET_CODE (XEXP (varop, 1)) == CONST_INT
+if (CONST_INT_P (XEXP (varop, 1))
 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
 shift_mode)))
@@ -9528,11 +9528,11 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,

 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
 if (code == ASHIFT
-&& GET_CODE (XEXP (varop, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (varop, 1))
 && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
 XEXP (varop, 1),
 GEN_INT (count))) != 0
-&& GET_CODE (new_rtx) == CONST_INT
+&& CONST_INT_P (new_rtx)
 && merge_outer_ops (&outer_op, &outer_const, PLUS,
 INTVAL (new_rtx), result_mode, &complement_p))
 {
@@ -9546,12 +9546,12 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 leg for shift(logical). See details in logical handling above
 for reasoning in doing so. */
 if (code == LSHIFTRT
-&& GET_CODE (XEXP (varop, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (varop, 1))
 && mode_signbit_p (result_mode, XEXP (varop, 1))
 && (new_rtx = simplify_const_binary_operation (code, result_mode,
 XEXP (varop, 1),
 GEN_INT (count))) != 0
-&& GET_CODE (new_rtx) == CONST_INT
+&& CONST_INT_P (new_rtx)
 && merge_outer_ops (&outer_op, &outer_const, XOR,
 INTVAL (new_rtx), result_mode, &complement_p))
 {
@@ -9573,7 +9573,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
 && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
 && (code == LSHIFTRT || code == ASHIFTRT)
-&& GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (varop, 0), 1))
 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
 {
@@ -9593,7 +9593,7 @@ simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
 if the truncate does not affect the value. */
 if (code == LSHIFTRT
 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
-&& GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (varop, 0), 1))
 && (INTVAL (XEXP (XEXP (varop, 0), 1))
 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
 - GET_MODE_BITSIZE (GET_MODE (varop)))))
@@ -9881,7 +9881,7 @@ gen_lowpart_for_combine (enum machine_mode omode, rtx x)
 constant integer or has a mode the same size. */
 if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
 && ! ((imode == VOIDmode
-&& (GET_CODE (x) == CONST_INT
+&& (CONST_INT_P (x)
 || GET_CODE (x) == CONST_DOUBLE))
 || isize == osize))
 goto fail;
@@ -9999,7 +9999,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
-&& GET_CODE (XEXP (op0, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (op0, 1))
 && XEXP (op0, 1) == XEXP (op1, 1)
 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
@@ -10025,7 +10025,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 || (GET_CODE (op0) == ASHIFTRT
 && (code != GTU && code != LTU
 && code != GEU && code != LEU)))
-&& GET_CODE (XEXP (op0, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (op0, 1))
 && INTVAL (XEXP (op0, 1)) >= 0
 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
 && XEXP (op0, 1) == XEXP (op1, 1)
@@ -10060,8 +10060,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 present. */

 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
-&& GET_CODE (XEXP (op0, 1)) == CONST_INT
-&& GET_CODE (XEXP (op1, 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (op0, 1))
+&& CONST_INT_P (XEXP (op1, 1)))
 {
 rtx inner_op0 = XEXP (op0, 0);
 rtx inner_op1 = XEXP (op1, 0);
@@ -10134,7 +10134,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 but some things may really be comparisons with zero but not start
 out looking that way. */

-while (GET_CODE (op1) == CONST_INT)
+while (CONST_INT_P (op1))
 {
 enum machine_mode mode = GET_MODE (op0);
 unsigned int mode_width = GET_MODE_BITSIZE (mode);
@@ -10358,7 +10358,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
 have already reduced the shift count modulo the word size. */
 if (!SHIFT_COUNT_TRUNCATED
-&& GET_CODE (XEXP (op0, 0)) == CONST_INT
+&& CONST_INT_P (XEXP (op0, 0))
 && XEXP (op0, 1) == const1_rtx
 && equality_comparison_p && const_op == 0
 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
@@ -10457,7 +10457,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 case ROTATE:
 /* If we are testing equality and our count is a constant, we
 can perform the inverse operation on our RHS. */
-if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
+if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
 && (tem = simplify_binary_operation (ROTATERT, mode,
 op1, XEXP (op0, 1))) != 0)
 {
@@ -10470,7 +10470,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 a particular bit. Convert it to an AND of a constant of that
 bit. This will be converted into a ZERO_EXTRACT. */
 if (const_op == 0 && sign_bit_comparison_p
-&& GET_CODE (XEXP (op0, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (op0, 1))
 && mode_width <= HOST_BITS_PER_WIDE_INT)
 {
 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
@@ -10540,7 +10540,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 && subreg_lowpart_p (op0)
 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
 && GET_CODE (SUBREG_REG (op0)) == PLUS
-&& GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
+&& CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
 {
 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
 rtx a = XEXP (SUBREG_REG (op0), 0);
@@ -10650,7 +10650,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
 of bits in X minus 1, is one iff X > 0. */
 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
-&& GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (op0, 0), 1))
 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
 == mode_width - 1
 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
@@ -10751,9 +10751,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
 in STORE_FLAG_VALUE, we can compare with X. */
 if (const_op == 0 && equality_comparison_p
 && mode_width <= HOST_BITS_PER_WIDE_INT
-&& GET_CODE (XEXP (op0, 1)) == CONST_INT
+&& CONST_INT_P (XEXP (op0, 1))
 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
-&& GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
+&& CONST_INT_P (XEXP (XEXP (op0, 0), 1))
 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
 {
@@ -10774,7 +10774,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
the underlying value. */
|
||||
if (equality_comparison_p
|
||||
&& const_op == 0
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op0, 1))
|
||||
&& mode_width <= HOST_BITS_PER_WIDE_INT
|
||||
&& ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
|
||||
== (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
|
||||
@ -10798,7 +10798,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
known to hold a value of the required mode the
|
||||
transformation is invalid. */
|
||||
if ((equality_comparison_p || unsigned_comparison_p)
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op0, 1))
|
||||
&& (i = exact_log2 ((INTVAL (XEXP (op0, 1))
|
||||
& GET_MODE_MASK (mode))
|
||||
+ 1)) >= 0
|
||||
@ -10838,7 +10838,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
#endif
|
||||
|| (mode_width <= GET_MODE_BITSIZE (tmode)
|
||||
&& subreg_lowpart_p (XEXP (op0, 0))))
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op0, 1))
|
||||
&& mode_width <= HOST_BITS_PER_WIDE_INT
|
||||
&& GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
|
||||
@ -10877,8 +10877,8 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
|
||||
if (GET_CODE (shift_op) == NOT
|
||||
|| (GET_CODE (shift_op) == XOR
|
||||
&& GET_CODE (XEXP (shift_op, 1)) == CONST_INT
|
||||
&& GET_CODE (shift_count) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (shift_op, 1))
|
||||
&& CONST_INT_P (shift_count)
|
||||
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|
||||
&& (INTVAL (XEXP (shift_op, 1))
|
||||
== (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
|
||||
@ -10899,7 +10899,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
are known to be zero, we can do this by comparing FOO with C
|
||||
shifted right N bits so long as the low-order N bits of C are
|
||||
zero. */
|
||||
if (GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (op0, 1))
|
||||
&& INTVAL (XEXP (op0, 1)) >= 0
|
||||
&& ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
|
||||
< HOST_BITS_PER_WIDE_INT)
|
||||
@ -10922,7 +10922,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
|
||||
/* If we are doing a sign bit comparison, it means we are testing
|
||||
a particular bit. Convert it to the appropriate AND. */
|
||||
if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
|
||||
&& mode_width <= HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
|
||||
@ -10937,7 +10937,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
the low bit to the sign bit, we can convert this to an AND of the
|
||||
low-order bit. */
|
||||
if (const_op == 0 && equality_comparison_p
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op0, 1))
|
||||
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
|
||||
== mode_width - 1)
|
||||
{
|
||||
@ -10951,7 +10951,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
/* If this is an equality comparison with zero, we can do this
|
||||
as a logical shift, which might be much simpler. */
|
||||
if (equality_comparison_p && const_op == 0
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (op0, 1)))
|
||||
{
|
||||
op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
|
||||
XEXP (op0, 0),
|
||||
@ -10962,7 +10962,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
/* If OP0 is a sign extension and CODE is not an unsigned comparison,
|
||||
do the comparison in a narrower mode. */
|
||||
if (! unsigned_comparison_p
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op0, 1))
|
||||
&& GET_CODE (XEXP (op0, 0)) == ASHIFT
|
||||
&& XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
|
||||
&& (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
|
||||
@ -10979,9 +10979,9 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
constant, which is usually represented with the PLUS
|
||||
between the shifts. */
|
||||
if (! unsigned_comparison_p
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op0, 1))
|
||||
&& GET_CODE (XEXP (op0, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (XEXP (op0, 0), 1))
|
||||
&& GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
|
||||
&& XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
|
||||
&& (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
|
||||
@ -11007,7 +11007,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
the low order N bits of FOO are known to be zero, we can do this
|
||||
by comparing FOO with C shifted left N bits so long as no
|
||||
overflow occurs. */
|
||||
if (GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (op0, 1))
|
||||
&& INTVAL (XEXP (op0, 1)) >= 0
|
||||
&& INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
|
||||
&& mode_width <= HOST_BITS_PER_WIDE_INT
|
||||
@ -11035,7 +11035,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
can replace this with an LT or GE comparison. */
|
||||
if (const_op == 0
|
||||
&& (equality_comparison_p || sign_bit_comparison_p)
|
||||
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op0, 1))
|
||||
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
|
||||
== mode_width - 1)
|
||||
{
|
||||
@ -11134,7 +11134,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
|| code == LEU || code == LTU)
|
||||
&& (nonzero_bits (op0, tmode)
|
||||
& ~GET_MODE_MASK (mode)) == 0
|
||||
&& ((GET_CODE (op1) == CONST_INT
|
||||
&& ((CONST_INT_P (op1)
|
||||
|| (nonzero_bits (op1, tmode)
|
||||
& ~GET_MODE_MASK (mode)) == 0)));
|
||||
|
||||
@ -11157,7 +11157,7 @@ simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
|
||||
XEXP (op0, 1)));
|
||||
|
||||
op0 = gen_lowpart (tmode, op0);
|
||||
if (zero_extended && GET_CODE (op1) == CONST_INT)
|
||||
if (zero_extended && CONST_INT_P (op1))
|
||||
op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
|
||||
op1 = gen_lowpart (tmode, op1);
|
||||
break;
|
||||
|
30
gcc/cse.c
30
gcc/cse.c
@ -643,7 +643,7 @@ fixed_base_plus_p (rtx x)
|
||||
return false;
|
||||
|
||||
case PLUS:
|
||||
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
|
||||
if (!CONST_INT_P (XEXP (x, 1)))
|
||||
return false;
|
||||
return fixed_base_plus_p (XEXP (x, 0));
|
||||
|
||||
@ -2813,7 +2813,7 @@ cse_rtx_varies_p (const_rtx x, bool from_alias)
|
||||
}
|
||||
|
||||
if (GET_CODE (x) == PLUS
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (x, 1))
|
||||
&& REG_P (XEXP (x, 0))
|
||||
&& REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
|
||||
{
|
||||
@ -3525,7 +3525,7 @@ fold_rtx (rtx x, rtx insn)
|
||||
|
||||
if (y != 0
|
||||
&& (inner_const = equiv_constant (XEXP (y, 1))) != 0
|
||||
&& GET_CODE (inner_const) == CONST_INT
|
||||
&& CONST_INT_P (inner_const)
|
||||
&& INTVAL (inner_const) != 0)
|
||||
folded_arg0 = gen_rtx_IOR (mode_arg0, XEXP (y, 0), inner_const);
|
||||
}
|
||||
@ -3595,7 +3595,7 @@ fold_rtx (rtx x, rtx insn)
|
||||
the smallest negative number this would overflow: depending
|
||||
on the mode, this would either just be the same value (and
|
||||
hence not save anything) or be incorrect. */
|
||||
if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
|
||||
if (const_arg1 != 0 && CONST_INT_P (const_arg1)
|
||||
&& INTVAL (const_arg1) < 0
|
||||
/* This used to test
|
||||
|
||||
@ -3623,10 +3623,10 @@ fold_rtx (rtx x, rtx insn)
|
||||
case MINUS:
|
||||
/* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
|
||||
If so, produce (PLUS Z C2-C). */
|
||||
if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
|
||||
if (const_arg1 != 0 && CONST_INT_P (const_arg1))
|
||||
{
|
||||
rtx y = lookup_as_function (XEXP (x, 0), PLUS);
|
||||
if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
|
||||
if (y && CONST_INT_P (XEXP (y, 1)))
|
||||
return fold_rtx (plus_constant (copy_rtx (y),
|
||||
-INTVAL (const_arg1)),
|
||||
NULL_RTX);
|
||||
@ -3647,7 +3647,7 @@ fold_rtx (rtx x, rtx insn)
|
||||
if the intermediate operation's result has only one reference. */
|
||||
|
||||
if (REG_P (folded_arg0)
|
||||
&& const_arg1 && GET_CODE (const_arg1) == CONST_INT)
|
||||
&& const_arg1 && CONST_INT_P (const_arg1))
|
||||
{
|
||||
int is_shift
|
||||
= (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
|
||||
@ -3680,7 +3680,7 @@ fold_rtx (rtx x, rtx insn)
|
||||
break;
|
||||
|
||||
inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
|
||||
if (!inner_const || GET_CODE (inner_const) != CONST_INT)
|
||||
if (!inner_const || !CONST_INT_P (inner_const))
|
||||
break;
|
||||
|
||||
/* Don't associate these operations if they are a PLUS with the
|
||||
@ -3734,7 +3734,7 @@ fold_rtx (rtx x, rtx insn)
|
||||
of shifts. */
|
||||
|
||||
if (is_shift
|
||||
&& GET_CODE (new_const) == CONST_INT
|
||||
&& CONST_INT_P (new_const)
|
||||
&& INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
|
||||
{
|
||||
/* As an exception, we can turn an ASHIFTRT of this
|
||||
@ -4506,8 +4506,8 @@ cse_insn (rtx insn)
|
||||
{
|
||||
rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
|
||||
|
||||
if (GET_CODE (src) == CONST_INT
|
||||
&& GET_CODE (width) == CONST_INT
|
||||
if (CONST_INT_P (src)
|
||||
&& CONST_INT_P (width)
|
||||
&& INTVAL (width) < HOST_BITS_PER_WIDE_INT
|
||||
&& (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
|
||||
src_folded
|
||||
@ -4668,7 +4668,7 @@ cse_insn (rtx insn)
|
||||
/* See if we have a CONST_INT that is already in a register in a
|
||||
wider mode. */
|
||||
|
||||
if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
|
||||
if (src_const && src_related == 0 && CONST_INT_P (src_const)
|
||||
&& GET_MODE_CLASS (mode) == MODE_INT
|
||||
&& GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
|
||||
{
|
||||
@ -4703,7 +4703,7 @@ cse_insn (rtx insn)
|
||||
value. */
|
||||
|
||||
if (flag_expensive_optimizations && ! src_related
|
||||
&& GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
|
||||
&& GET_CODE (src) == AND && CONST_INT_P (XEXP (src, 1))
|
||||
&& GET_MODE_SIZE (mode) < UNITS_PER_WORD)
|
||||
{
|
||||
enum machine_mode tmode;
|
||||
@ -5226,8 +5226,8 @@ cse_insn (rtx insn)
|
||||
{
|
||||
rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
|
||||
|
||||
if (src_const != 0 && GET_CODE (src_const) == CONST_INT
|
||||
&& GET_CODE (width) == CONST_INT
|
||||
if (src_const != 0 && CONST_INT_P (src_const)
|
||||
&& CONST_INT_P (width)
|
||||
&& INTVAL (width) < HOST_BITS_PER_WIDE_INT
|
||||
&& ! (INTVAL (src_const)
|
||||
& ((HOST_WIDE_INT) (-1) << INTVAL (width))))
|
||||
|
@ -236,7 +236,7 @@ entry_and_rtx_equal_p (const void *entry, const void *x_arg)
|
||||
rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
|
||||
enum machine_mode mode = GET_MODE (x);
|
||||
|
||||
gcc_assert (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED
|
||||
gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
|
||||
&& (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
|
||||
|
||||
if (mode != GET_MODE (v->val_rtx))
|
||||
@ -244,7 +244,7 @@ entry_and_rtx_equal_p (const void *entry, const void *x_arg)
|
||||
|
||||
/* Unwrap X if necessary. */
|
||||
if (GET_CODE (x) == CONST
|
||||
&& (GET_CODE (XEXP (x, 0)) == CONST_INT
|
||||
&& (CONST_INT_P (XEXP (x, 0))
|
||||
|| GET_CODE (XEXP (x, 0)) == CONST_FIXED
|
||||
|| GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
|
||||
x = XEXP (x, 0);
|
||||
@ -555,7 +555,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
|
||||
static rtx
|
||||
wrap_constant (enum machine_mode mode, rtx x)
|
||||
{
|
||||
if (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED
|
||||
if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
|
||||
&& (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
|
||||
return x;
|
||||
gcc_assert (mode != VOIDmode);
|
||||
|
@ -3032,7 +3032,7 @@ dbxout_symbol_location (tree decl, tree type, const char *suffix, rtx home)
|
||||
}
|
||||
else if (MEM_P (home)
|
||||
&& GET_CODE (XEXP (home, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (XEXP (home, 0), 1)))
|
||||
{
|
||||
code = N_LSYM;
|
||||
/* RTL looks like (MEM (PLUS (REG...) (CONST_INT...)))
|
||||
@ -3218,7 +3218,7 @@ dbxout_common_check (tree decl, int *value)
|
||||
switch (GET_CODE (sym_addr))
|
||||
{
|
||||
case PLUS:
|
||||
if (GET_CODE (XEXP (sym_addr, 0)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (sym_addr, 0)))
|
||||
{
|
||||
name =
|
||||
targetm.strip_name_encoding(XSTR (XEXP (sym_addr, 1), 0));
|
||||
@ -3351,7 +3351,7 @@ dbxout_parms (tree parms)
|
||||
If that is not true, we produce meaningless results,
|
||||
but do not crash. */
|
||||
if (GET_CODE (inrtl) == PLUS
|
||||
&& GET_CODE (XEXP (inrtl, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (inrtl, 1)))
|
||||
number = INTVAL (XEXP (inrtl, 1));
|
||||
else
|
||||
number = 0;
|
||||
|
@ -2964,8 +2964,8 @@ df_def_record_1 (struct df_collection_rec *collection_rec,
|
||||
{
|
||||
flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
|
||||
|
||||
if (GET_CODE (XEXP (dst, 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (dst, 2)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (dst, 1))
|
||||
&& CONST_INT_P (XEXP (dst, 2)))
|
||||
{
|
||||
width = INTVAL (XEXP (dst, 1));
|
||||
offset = INTVAL (XEXP (dst, 2));
|
||||
@ -3120,8 +3120,8 @@ df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
|
||||
/* If the parameters to the zero or sign extract are
|
||||
constants, strip them off and recurse, otherwise there is
|
||||
no information that we can gain from this operation. */
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (x, 2)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& CONST_INT_P (XEXP (x, 2)))
|
||||
{
|
||||
width = INTVAL (XEXP (x, 1));
|
||||
offset = INTVAL (XEXP (x, 2));
|
||||
@ -3186,8 +3186,8 @@ df_uses_record (enum df_ref_class cl, struct df_collection_rec *collection_rec,
|
||||
break;
|
||||
case ZERO_EXTRACT:
|
||||
{
|
||||
if (GET_CODE (XEXP (dst, 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (dst, 2)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (dst, 1))
|
||||
&& CONST_INT_P (XEXP (dst, 2)))
|
||||
{
|
||||
width = INTVAL (XEXP (dst, 1));
|
||||
offset = INTVAL (XEXP (dst, 2));
|
||||
|
@ -1150,7 +1150,7 @@ canon_address (rtx mem,
|
||||
if (GET_CODE (address) == CONST)
|
||||
address = XEXP (address, 0);
|
||||
|
||||
if (GET_CODE (address) == PLUS && GET_CODE (XEXP (address, 1)) == CONST_INT)
|
||||
if (GET_CODE (address) == PLUS && CONST_INT_P (XEXP (address, 1)))
|
||||
{
|
||||
*offset = INTVAL (XEXP (address, 1));
|
||||
address = XEXP (address, 0);
|
||||
|
@ -52,7 +52,7 @@ dw2_assemble_integer (int size, rtx x)
|
||||
if (op)
|
||||
{
|
||||
fputs (op, asm_out_file);
|
||||
if (GET_CODE (x) == CONST_INT)
|
||||
if (CONST_INT_P (x))
|
||||
fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX,
|
||||
(unsigned HOST_WIDE_INT) INTVAL (x));
|
||||
else
|
||||
|
@ -1197,7 +1197,7 @@ initial_return_save (rtx rtl)
|
||||
/* The return address is at some offset from any value we can
|
||||
actually load. For instance, on the SPARC it is in %i7+8. Just
|
||||
ignore the offset for now; it doesn't matter for unwinding frames. */
|
||||
gcc_assert (GET_CODE (XEXP (rtl, 1)) == CONST_INT);
|
||||
gcc_assert (CONST_INT_P (XEXP (rtl, 1)));
|
||||
initial_return_save (XEXP (rtl, 0));
|
||||
return;
|
||||
|
||||
@ -1239,7 +1239,7 @@ stack_adjust_offset (const_rtx pattern, HOST_WIDE_INT cur_args_size,
|
||||
|
||||
if (! (code == PLUS || code == MINUS)
|
||||
|| XEXP (src, 0) != stack_pointer_rtx
|
||||
|| GET_CODE (XEXP (src, 1)) != CONST_INT)
|
||||
|| !CONST_INT_P (XEXP (src, 1)))
|
||||
return 0;
|
||||
|
||||
/* (set (reg sp) (plus (reg sp) (const_int))) */
|
||||
@ -1266,7 +1266,7 @@ stack_adjust_offset (const_rtx pattern, HOST_WIDE_INT cur_args_size,
|
||||
rtx val = XEXP (XEXP (src, 1), 1);
|
||||
/* We handle only adjustments by constant amount. */
|
||||
gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS
|
||||
&& GET_CODE (val) == CONST_INT);
|
||||
&& CONST_INT_P (val));
|
||||
offset = -INTVAL (val);
|
||||
break;
|
||||
}
|
||||
@ -2185,17 +2185,17 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
|
||||
|
||||
fde = current_fde ();
|
||||
|
||||
if (GET_CODE (src) == REG
|
||||
if (REG_P (src)
|
||||
&& fde
|
||||
&& fde->drap_reg == REGNO (src)
|
||||
&& (fde->drap_reg_saved
|
||||
|| GET_CODE (dest) == REG))
|
||||
|| REG_P (dest)))
|
||||
{
|
||||
/* Rule 20 */
|
||||
/* If we are saving dynamic realign argument pointer to a
|
||||
register, the destination is virtual dynamic realign
|
||||
argument pointer. It may be used to access argument. */
|
||||
if (GET_CODE (dest) == REG)
|
||||
if (REG_P (dest))
|
||||
{
|
||||
gcc_assert (fde->vdrap_reg == INVALID_REGNUM);
|
||||
fde->vdrap_reg = REGNO (dest);
|
||||
@ -2296,7 +2296,7 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
|
||||
|
||||
gcc_assert (REG_P (XEXP (src, 0))
|
||||
&& (unsigned) REGNO (XEXP (src, 0)) == cfa.reg
|
||||
&& GET_CODE (XEXP (src, 1)) == CONST_INT);
|
||||
&& CONST_INT_P (XEXP (src, 1)));
|
||||
offset = INTVAL (XEXP (src, 1));
|
||||
if (GET_CODE (src) != MINUS)
|
||||
offset = -offset;
|
||||
@ -2310,7 +2310,7 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
|
||||
/* Rule 4 */
|
||||
if (REG_P (XEXP (src, 0))
|
||||
&& REGNO (XEXP (src, 0)) == cfa.reg
|
||||
&& GET_CODE (XEXP (src, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (src, 1)))
|
||||
{
|
||||
/* Setting a temporary CFA register that will be copied
|
||||
into the FP later on. */
|
||||
@ -2336,7 +2336,7 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
|
||||
|
||||
/* Rule 9 */
|
||||
else if (GET_CODE (src) == LO_SUM
|
||||
&& GET_CODE (XEXP (src, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (src, 1)))
|
||||
{
|
||||
cfa_temp.reg = REGNO (dest);
|
||||
cfa_temp.offset = INTVAL (XEXP (src, 1));
|
||||
@ -2356,7 +2356,7 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
|
||||
case IOR:
|
||||
gcc_assert (REG_P (XEXP (src, 0))
|
||||
&& (unsigned) REGNO (XEXP (src, 0)) == cfa_temp.reg
|
||||
&& GET_CODE (XEXP (src, 1)) == CONST_INT);
|
||||
&& CONST_INT_P (XEXP (src, 1)));
|
||||
|
||||
if ((unsigned) REGNO (dest) != cfa_temp.reg)
|
||||
cfa_temp.reg = REGNO (dest);
|
||||
@ -2463,7 +2463,7 @@ dwarf2out_frame_debug_expr (rtx expr, const char *label)
|
||||
{
|
||||
int regno;
|
||||
|
||||
gcc_assert (GET_CODE (XEXP (XEXP (dest, 0), 1)) == CONST_INT
|
||||
gcc_assert (CONST_INT_P (XEXP (XEXP (dest, 0), 1))
|
||||
&& REG_P (XEXP (XEXP (dest, 0), 0)));
|
||||
offset = INTVAL (XEXP (XEXP (dest, 0), 1));
|
||||
if (GET_CODE (XEXP (dest, 0)) == MINUS)
|
||||
@ -10277,7 +10277,7 @@ is_based_loc (const_rtx rtl)
|
||||
return (GET_CODE (rtl) == PLUS
|
||||
&& ((REG_P (XEXP (rtl, 0))
|
||||
&& REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
|
||||
&& GET_CODE (XEXP (rtl, 1)) == CONST_INT)));
|
||||
&& CONST_INT_P (XEXP (rtl, 1)))));
|
||||
}
|
||||
|
||||
/* Return a descriptor that describes the concatenation of N locations
|
||||
@ -10498,7 +10498,7 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
|
||||
if (mem_loc_result == 0)
|
||||
break;
|
||||
|
||||
if (GET_CODE (XEXP (rtl, 1)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (rtl, 1)))
|
||||
loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
|
||||
else
|
||||
{
|
||||
@ -10832,7 +10832,7 @@ loc_descriptor_from_tree_1 (tree loc, int want_address)
|
||||
|
||||
if (rtl == NULL_RTX)
|
||||
return 0;
|
||||
else if (GET_CODE (rtl) == CONST_INT)
|
||||
else if (CONST_INT_P (rtl))
|
||||
{
|
||||
HOST_WIDE_INT val = INTVAL (rtl);
|
||||
if (TYPE_UNSIGNED (TREE_TYPE (loc)))
|
||||
|
@ -975,7 +975,7 @@ set_reg_attrs_from_value (rtx reg, rtx x)
|
||||
offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
|
||||
if (MEM_P (x))
|
||||
{
|
||||
if (MEM_OFFSET (x) && GET_CODE (MEM_OFFSET (x)) == CONST_INT)
|
||||
if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
|
||||
REG_ATTRS (reg)
|
||||
= get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
|
||||
if (MEM_POINTER (x))
|
||||
@ -1181,7 +1181,7 @@ gen_lowpart_common (enum machine_mode mode, rtx x)
|
||||
/* Unfortunately, this routine doesn't take a parameter for the mode of X,
|
||||
so we have to make one up. Yuk. */
|
||||
innermode = GET_MODE (x);
|
||||
if (GET_CODE (x) == CONST_INT
|
||||
if (CONST_INT_P (x)
|
||||
&& msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
|
||||
innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
|
||||
else if (innermode == VOIDmode)
|
||||
@ -1226,7 +1226,7 @@ gen_lowpart_common (enum machine_mode mode, rtx x)
|
||||
}
|
||||
else if (GET_CODE (x) == SUBREG || REG_P (x)
|
||||
|| GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
|
||||
|| GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
|
||||
|| GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
|
||||
return simplify_gen_subreg (mode, x, innermode, offset);
|
||||
|
||||
/* Otherwise, we can't do this. */
|
||||
@ -2281,7 +2281,7 @@ set_mem_attrs_for_spill (rtx mem)
|
||||
addr = XEXP (mem, 0);
|
||||
offset = const0_rtx;
|
||||
if (GET_CODE (addr) == PLUS
|
||||
&& GET_CODE (XEXP (addr, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (addr, 1)))
|
||||
offset = XEXP (addr, 1);
|
||||
|
||||
MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
|
||||
|
@ -2820,7 +2820,7 @@ for_each_eh_label (void (*callback) (rtx))
|
||||
{
|
||||
struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
|
||||
if (r && r->region_number == i && r->label
|
||||
&& GET_CODE (r->label) == CODE_LABEL)
|
||||
&& LABEL_P (r->label))
|
||||
(*callback) (r->label);
|
||||
}
|
||||
}
|
||||
@ -4408,19 +4408,19 @@ dump_eh_tree (FILE * out, struct function *fun)
|
||||
if (i->landing_pad)
|
||||
{
|
||||
fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
|
||||
if (GET_CODE (i->landing_pad) == NOTE)
|
||||
if (NOTE_P (i->landing_pad))
|
||||
fprintf (out, " (deleted)");
|
||||
}
|
||||
if (i->post_landing_pad)
|
||||
{
|
||||
fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
|
||||
if (GET_CODE (i->post_landing_pad) == NOTE)
|
||||
if (NOTE_P (i->post_landing_pad))
|
||||
fprintf (out, " (deleted)");
|
||||
}
|
||||
if (i->resume)
|
||||
{
|
||||
fprintf (out, " resume:%i", INSN_UID (i->resume));
|
||||
if (GET_CODE (i->resume) == NOTE)
|
||||
if (NOTE_P (i->resume))
|
||||
fprintf (out, " (deleted)");
|
||||
}
|
||||
if (i->may_contain_throw)
|
||||
|
26
gcc/explow.c
26
gcc/explow.c
@ -153,7 +153,7 @@ plus_constant (rtx x, HOST_WIDE_INT c)
|
||||
We may not immediately return from the recursive call here, lest
|
||||
all_constant gets lost. */
|
||||
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (x, 1)))
|
||||
{
|
||||
c += INTVAL (XEXP (x, 1));
|
||||
|
||||
@ -211,10 +211,10 @@ eliminate_constant_term (rtx x, rtx *constptr)
|
||||
return x;
|
||||
|
||||
/* First handle constants appearing at this level explicitly. */
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
|
||||
XEXP (x, 1)))
|
||||
&& GET_CODE (tem) == CONST_INT)
|
||||
&& CONST_INT_P (tem))
|
||||
{
|
||||
*constptr = tem;
|
||||
return eliminate_constant_term (XEXP (x, 0), constptr);
|
||||
@ -226,7 +226,7 @@ eliminate_constant_term (rtx x, rtx *constptr)
|
||||
if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
|
||||
&& 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
|
||||
*constptr, tem))
|
||||
&& GET_CODE (tem) == CONST_INT)
|
||||
&& CONST_INT_P (tem))
|
||||
{
|
||||
*constptr = tem;
|
||||
return gen_rtx_PLUS (GET_MODE (x), x0, x1);
|
||||
@ -388,7 +388,7 @@ convert_memory_address (enum machine_mode to_mode ATTRIBUTE_UNUSED,
|
||||
narrower. */
|
||||
if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
|
||||
|| (GET_CODE (x) == PLUS
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (x, 1))
|
||||
&& (XEXP (x, 1) == convert_memory_address (to_mode, XEXP (x, 1))
|
||||
|| POINTERS_EXTEND_UNSIGNED < 0)))
|
||||
return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
|
||||
@ -504,7 +504,7 @@ memory_address (enum machine_mode mode, rtx x)
|
||||
mark_reg_pointer (x, BITS_PER_UNIT);
|
||||
else if (GET_CODE (x) == PLUS
|
||||
&& REG_P (XEXP (x, 0))
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
|
||||
|
||||
/* OLDX may have been the address on a temporary. Update the address
|
||||
@ -551,7 +551,7 @@ use_anchored_address (rtx x)
|
||||
offset = 0;
|
||||
if (GET_CODE (base) == CONST
|
||||
&& GET_CODE (XEXP (base, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (base, 0), 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (XEXP (base, 0), 1)))
|
||||
{
|
||||
offset += INTVAL (XEXP (XEXP (base, 0), 1));
|
||||
base = XEXP (XEXP (base, 0), 0);
|
||||
@ -689,7 +689,7 @@ force_reg (enum machine_mode mode, rtx x)
|
||||
else if (GET_CODE (x) == CONST
|
||||
&& GET_CODE (XEXP (x, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
|
||||
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (XEXP (x, 0), 1)))
|
||||
{
|
||||
rtx s = XEXP (XEXP (x, 0), 0);
|
||||
rtx c = XEXP (XEXP (x, 0), 1);
|
||||
@ -820,7 +820,7 @@ adjust_stack (rtx adjust)
|
||||
|
||||
/* We expect all variable sized adjustments to be multiple of
|
||||
PREFERRED_STACK_BOUNDARY. */
|
||||
if (GET_CODE (adjust) == CONST_INT)
|
||||
if (CONST_INT_P (adjust))
|
||||
stack_pointer_delta -= INTVAL (adjust);
|
||||
|
||||
temp = expand_binop (Pmode,
|
||||
@ -849,7 +849,7 @@ anti_adjust_stack (rtx adjust)
|
||||
|
||||
/* We expect all variable sized adjustments to be multiple of
|
||||
PREFERRED_STACK_BOUNDARY. */
|
||||
if (GET_CODE (adjust) == CONST_INT)
|
||||
if (CONST_INT_P (adjust))
|
||||
stack_pointer_delta += INTVAL (adjust);
|
||||
|
||||
temp = expand_binop (Pmode,
|
||||
@ -876,7 +876,7 @@ round_push (rtx size)
|
||||
if (align == 1)
|
||||
return size;
|
||||
|
||||
if (GET_CODE (size) == CONST_INT)
|
||||
if (CONST_INT_P (size))
|
||||
{
|
||||
HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
|
||||
|
||||
@ -1138,7 +1138,7 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
|
||||
alignment. This constraint may be too strong. */
|
||||
gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);
|
||||
|
||||
if (GET_CODE (size) == CONST_INT)
|
||||
if (CONST_INT_P (size))
|
||||
{
|
||||
HOST_WIDE_INT new_size = INTVAL (size) / align * align;
|
||||
|
||||
@ -1376,7 +1376,7 @@ probe_stack_range (HOST_WIDE_INT first, rtx size)
|
||||
|
||||
/* If we have to generate explicit probes, see if we have a constant
|
||||
small number of them to generate. If so, that's the easy case. */
|
||||
else if (GET_CODE (size) == CONST_INT
|
||||
else if (CONST_INT_P (size)
|
||||
&& INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
|
||||
{
|
||||
HOST_WIDE_INT offset;
|
||||
|
22
gcc/expmed.c
22
gcc/expmed.c
@ -543,7 +543,7 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
||||
rtx arg0 = op0;
|
||||
|
||||
/* Get appropriate low part of the value being stored. */
|
||||
if (GET_CODE (value) == CONST_INT || REG_P (value))
|
||||
if (CONST_INT_P (value) || REG_P (value))
|
||||
value = gen_lowpart (fieldmode, value);
|
||||
else if (!(GET_CODE (value) == SYMBOL_REF
|
||||
|| GET_CODE (value) == LABEL_REF
|
||||
@ -737,7 +737,7 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
||||
else
|
||||
value1 = gen_lowpart (op_mode, value1);
|
||||
}
|
||||
else if (GET_CODE (value) == CONST_INT)
|
||||
else if (CONST_INT_P (value))
|
||||
value1 = gen_int_mode (INTVAL (value), op_mode);
|
||||
else
|
||||
/* Parse phase is supposed to make VALUE's data type
|
||||
@ -946,7 +946,7 @@ store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT offset,
|
||||
/* Shift VALUE left by BITPOS bits. If VALUE is not constant,
|
||||
we must first convert its mode to MODE. */
|
||||
|
||||
if (GET_CODE (value) == CONST_INT)
|
||||
if (CONST_INT_P (value))
|
||||
{
|
||||
HOST_WIDE_INT v = INTVAL (value);
|
||||
|
||||
@ -1036,7 +1036,7 @@ store_split_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
|
||||
/* If VALUE is a constant other than a CONST_INT, get it into a register in
|
||||
WORD_MODE. If we can do this using gen_lowpart_common, do so. Note
|
||||
that VALUE might be a floating-point constant. */
|
||||
if (CONSTANT_P (value) && GET_CODE (value) != CONST_INT)
|
||||
if (CONSTANT_P (value) && !CONST_INT_P (value))
|
||||
{
|
||||
rtx word = gen_lowpart_common (word_mode, value);
|
||||
|
||||
@ -1078,7 +1078,7 @@ store_split_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
|
||||
total_bits = GET_MODE_BITSIZE (GET_MODE (value));
|
||||
|
||||
/* Fetch successively less significant portions. */
|
||||
if (GET_CODE (value) == CONST_INT)
|
||||
if (CONST_INT_P (value))
|
||||
part = GEN_INT (((unsigned HOST_WIDE_INT) (INTVAL (value))
|
||||
>> (bitsize - bitsdone - thissize))
|
||||
& (((HOST_WIDE_INT) 1 << thissize) - 1));
|
||||
@ -1093,7 +1093,7 @@ store_split_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
|
||||
else
|
||||
{
|
||||
/* Fetch successively more significant portions. */
|
||||
if (GET_CODE (value) == CONST_INT)
|
||||
if (CONST_INT_P (value))
|
||||
part = GEN_INT (((unsigned HOST_WIDE_INT) (INTVAL (value))
|
||||
>> bitsdone)
|
||||
& (((HOST_WIDE_INT) 1 << thissize) - 1));
|
||||
@ -2138,7 +2138,7 @@ expand_shift (enum tree_code code, enum machine_mode mode, rtx shifted,
|
||||
|
||||
if (SHIFT_COUNT_TRUNCATED)
|
||||
{
|
||||
if (GET_CODE (op1) == CONST_INT
|
||||
if (CONST_INT_P (op1)
|
||||
&& ((unsigned HOST_WIDE_INT) INTVAL (op1) >=
|
||||
(unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))
|
||||
op1 = GEN_INT ((unsigned HOST_WIDE_INT) INTVAL (op1)
|
||||
@ -2155,7 +2155,7 @@ expand_shift (enum tree_code code, enum machine_mode mode, rtx shifted,
|
||||
/* Check whether its cheaper to implement a left shift by a constant
|
||||
bit count by a sequence of additions. */
|
||||
if (code == LSHIFT_EXPR
|
||||
&& GET_CODE (op1) == CONST_INT
|
||||
&& CONST_INT_P (op1)
|
||||
&& INTVAL (op1) > 0
|
||||
&& INTVAL (op1) < GET_MODE_BITSIZE (mode)
|
||||
&& INTVAL (op1) < MAX_BITS_PER_WORD
|
||||
@ -3144,7 +3144,7 @@ expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target,
|
||||
any truncation. This means that multiplying by negative values does
|
||||
not work; results are off by 2^32 on a 32 bit machine. */
|
||||
|
||||
if (GET_CODE (op1) == CONST_INT)
|
||||
if (CONST_INT_P (op1))
|
||||
{
|
||||
/* Attempt to handle multiplication of DImode values by negative
|
||||
coefficients, by performing the multiplication by a positive
|
||||
@ -3846,7 +3846,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
|
||||
static HOST_WIDE_INT ext_op1;
|
||||
bool speed = optimize_insn_for_speed_p ();
|
||||
|
||||
op1_is_constant = GET_CODE (op1) == CONST_INT;
|
||||
op1_is_constant = CONST_INT_P (op1);
|
||||
if (op1_is_constant)
|
||||
{
|
||||
ext_op1 = INTVAL (op1);
|
||||
@ -3990,7 +3990,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
|
||||
|
||||
/* convert_modes may have placed op1 into a register, so we
|
||||
must recompute the following. */
|
||||
op1_is_constant = GET_CODE (op1) == CONST_INT;
|
||||
op1_is_constant = CONST_INT_P (op1);
|
||||
op1_is_pow2 = (op1_is_constant
|
||||
&& ((EXACT_POWER_OF_2_OR_ZERO_P (INTVAL (op1))
|
||||
|| (! unsignedp
|
||||
|
48
gcc/expr.c
48
gcc/expr.c
@ -772,7 +772,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
|
||||
|
||||
if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
|
||||
&& GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
|
||||
&& GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
|
||||
&& CONST_INT_P (x) && INTVAL (x) < 0)
|
||||
{
|
||||
HOST_WIDE_INT val = INTVAL (x);
|
||||
|
||||
@ -793,7 +793,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
|
||||
non-volatile MEM. Except for the constant case where MODE is no
|
||||
wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
|
||||
|
||||
if ((GET_CODE (x) == CONST_INT
|
||||
if ((CONST_INT_P (x)
|
||||
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
|
||||
|| (GET_MODE_CLASS (mode) == MODE_INT
|
||||
&& GET_MODE_CLASS (oldmode) == MODE_INT
|
||||
@ -810,7 +810,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
|
||||
/* ?? If we don't know OLDMODE, we have to assume here that
|
||||
X does not need sign- or zero-extension. This may not be
|
||||
the case, but it's the best we can do. */
|
||||
if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
|
||||
if (CONST_INT_P (x) && oldmode != VOIDmode
|
||||
&& GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
|
||||
{
|
||||
HOST_WIDE_INT val = INTVAL (x);
|
||||
@ -1199,7 +1199,7 @@ emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
|
||||
|
||||
/* Set MEM_SIZE as appropriate for this block copy. The main place this
|
||||
can be incorrect is coming from __builtin_memcpy. */
|
||||
if (GET_CODE (size) == CONST_INT)
|
||||
if (CONST_INT_P (size))
|
||||
{
|
||||
if (INTVAL (size) == 0)
|
||||
return 0;
|
||||
@ -1210,7 +1210,7 @@ emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
|
||||
set_mem_size (y, size);
|
||||
}
|
||||
|
||||
if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
|
||||
if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
|
||||
move_by_pieces (x, y, INTVAL (size), align, 0);
|
||||
else if (emit_block_move_via_movmem (x, y, size, align,
|
||||
expected_align, expected_size))
|
||||
@ -1313,7 +1313,7 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
|
||||
here because if SIZE is less than the mode mask, as it is
|
||||
returned by the macro, it will definitely be less than the
|
||||
actual mode mask. */
|
||||
&& ((GET_CODE (size) == CONST_INT
|
||||
&& ((CONST_INT_P (size)
|
||||
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
|
||||
<= (GET_MODE_MASK (mode) >> 1)))
|
||||
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
|
||||
@ -2608,7 +2608,7 @@ clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
|
||||
/* If OBJECT is not BLKmode and SIZE is the same size as its mode,
|
||||
just move a zero. Otherwise, do this a piece at a time. */
|
||||
if (mode != BLKmode
|
||||
&& GET_CODE (size) == CONST_INT
|
||||
&& CONST_INT_P (size)
|
||||
&& INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
|
||||
{
|
||||
rtx zero = CONST0_RTX (mode);
|
||||
@ -2635,7 +2635,7 @@ clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
|
||||
|
||||
align = MEM_ALIGN (object);
|
||||
|
||||
if (GET_CODE (size) == CONST_INT
|
||||
if (CONST_INT_P (size)
|
||||
&& CLEAR_BY_PIECES_P (INTVAL (size), align))
|
||||
clear_by_pieces (object, INTVAL (size), align);
|
||||
else if (set_storage_via_setmem (object, size, const0_rtx, align,
|
||||
@ -2681,7 +2681,7 @@ set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
|
||||
for returning pointers, we could end up generating incorrect code. */
|
||||
|
||||
object_tree = make_tree (ptr_type_node, object);
|
||||
if (GET_CODE (val) != CONST_INT)
|
||||
if (!CONST_INT_P (val))
|
||||
val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
|
||||
size_tree = make_tree (sizetype, size);
|
||||
val_tree = make_tree (integer_type_node, val);
|
||||
@ -2774,7 +2774,7 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
|
||||
BITS_PER_HOST_WIDE_INT here because if SIZE is less than
|
||||
the mode mask, as it is returned by the macro, it will
|
||||
definitely be less than the actual mode mask. */
|
||||
&& ((GET_CODE (size) == CONST_INT
|
||||
&& ((CONST_INT_P (size)
|
||||
&& ((unsigned HOST_WIDE_INT) INTVAL (size)
|
||||
<= (GET_MODE_MASK (mode) >> 1)))
|
||||
|| GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
|
||||
@ -3046,7 +3046,7 @@ emit_move_resolve_push (enum machine_mode mode, rtx x)
|
||||
HOST_WIDE_INT val;
|
||||
|
||||
gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
|
||||
gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
|
||||
gcc_assert (CONST_INT_P (XEXP (expr, 1)));
|
||||
val = INTVAL (XEXP (expr, 1));
|
||||
if (GET_CODE (expr) == MINUS)
|
||||
val = -val;
|
||||
@ -3572,7 +3572,7 @@ push_block (rtx size, int extra, int below)
|
||||
}
|
||||
else
|
||||
{
|
||||
if (GET_CODE (size) == CONST_INT)
|
||||
if (CONST_INT_P (size))
|
||||
temp = plus_constant (virtual_outgoing_args_rtx,
|
||||
-INTVAL (size) - (below ? 0 : extra));
|
||||
else if (extra != 0 && !below)
|
||||
@ -3783,7 +3783,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
|
||||
on the stack for alignment purposes. */
|
||||
if (args_addr == 0
|
||||
&& PUSH_ARGS
|
||||
&& GET_CODE (size) == CONST_INT
|
||||
&& CONST_INT_P (size)
|
||||
&& skip == 0
|
||||
&& MEM_ALIGN (xinner) >= align
|
||||
&& (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
|
||||
@ -3816,7 +3816,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
|
||||
/* Deduct words put into registers from the size we must copy. */
|
||||
if (partial != 0)
|
||||
{
|
||||
if (GET_CODE (size) == CONST_INT)
|
||||
if (CONST_INT_P (size))
|
||||
size = GEN_INT (INTVAL (size) - used);
|
||||
else
|
||||
size = expand_binop (GET_MODE (size), sub_optab, size,
|
||||
@ -3832,7 +3832,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
|
||||
temp = push_block (size, extra, where_pad == downward);
|
||||
extra = 0;
|
||||
}
|
||||
else if (GET_CODE (args_so_far) == CONST_INT)
|
||||
else if (CONST_INT_P (args_so_far))
|
||||
temp = memory_address (BLKmode,
|
||||
plus_constant (args_addr,
|
||||
skip + INTVAL (args_so_far)));
|
||||
@ -3948,7 +3948,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
|
||||
else
|
||||
#endif
|
||||
{
|
||||
if (GET_CODE (args_so_far) == CONST_INT)
|
||||
if (CONST_INT_P (args_so_far))
|
||||
addr
|
||||
= memory_address (mode,
|
||||
plus_constant (args_addr,
|
||||
@ -4732,7 +4732,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
|
||||
type of the string, which is actually the size of the target. */
|
||||
rtx size = expr_size (exp);
|
||||
|
||||
if (GET_CODE (size) == CONST_INT
|
||||
if (CONST_INT_P (size)
|
||||
&& INTVAL (size) < TREE_STRING_LENGTH (exp))
|
||||
emit_block_move (target, temp, size,
|
||||
(call_param_p
|
||||
@ -4759,7 +4759,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
|
||||
|
||||
/* Figure out how much is left in TARGET that we have to clear.
|
||||
Do all calculations in ptr_mode. */
|
||||
if (GET_CODE (copy_size_rtx) == CONST_INT)
|
||||
if (CONST_INT_P (copy_size_rtx))
|
||||
{
|
||||
size = plus_constant (size, -INTVAL (copy_size_rtx));
|
||||
target = adjust_address (target, BLKmode,
|
||||
@ -6343,7 +6343,7 @@ force_operand (rtx value, rtx target)
|
||||
op2 = XEXP (value, 1);
|
||||
if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
|
||||
subtarget = 0;
|
||||
if (code == MINUS && GET_CODE (op2) == CONST_INT)
|
||||
if (code == MINUS && CONST_INT_P (op2))
|
||||
{
|
||||
code = PLUS;
|
||||
op2 = negate_rtx (GET_MODE (value), op2);
|
||||
@ -6355,7 +6355,7 @@ force_operand (rtx value, rtx target)
|
||||
constant first and then add the other value. This allows virtual
|
||||
register instantiation to simply modify the constant rather than
|
||||
creating another one around this addition. */
|
||||
if (code == PLUS && GET_CODE (op2) == CONST_INT
|
||||
if (code == PLUS && CONST_INT_P (op2)
|
||||
&& GET_CODE (XEXP (value, 0)) == PLUS
|
||||
&& REG_P (XEXP (XEXP (value, 0), 0))
|
||||
&& REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
|
||||
@ -8640,7 +8640,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
||||
|
||||
/* If the last operand is a CONST_INT, use plus_constant of
|
||||
the negated constant. Else make the MINUS. */
|
||||
if (GET_CODE (op1) == CONST_INT)
|
||||
if (CONST_INT_P (op1))
|
||||
return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
|
||||
else
|
||||
return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
|
||||
@ -8658,7 +8658,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
||||
subtarget, &op0, &op1, modifier);
|
||||
|
||||
/* Convert A - const to A + (-const). */
|
||||
if (GET_CODE (op1) == CONST_INT)
|
||||
if (CONST_INT_P (op1))
|
||||
{
|
||||
op1 = negate_rtx (mode, op1);
|
||||
return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
|
||||
@ -9123,7 +9123,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
||||
VOIDmode, EXPAND_NORMAL);
|
||||
|
||||
/* If temp is constant, we can just compute the result. */
|
||||
if (GET_CODE (temp) == CONST_INT)
|
||||
if (CONST_INT_P (temp))
|
||||
{
|
||||
if (INTVAL (temp) != 0)
|
||||
emit_move_insn (target, const1_rtx);
|
||||
@ -9567,7 +9567,7 @@ reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
|
||||
if (target && GET_MODE (target) != GET_MODE (exp))
|
||||
target = 0;
|
||||
/* For constant values, reduce using build_int_cst_type. */
|
||||
if (GET_CODE (exp) == CONST_INT)
|
||||
if (CONST_INT_P (exp))
|
||||
{
|
||||
HOST_WIDE_INT value = INTVAL (exp);
|
||||
tree t = build_int_cst_type (type, value);
|
||||
|
12
gcc/final.c
12
gcc/final.c
@ -3127,7 +3127,7 @@ get_mem_expr_from_op (rtx op, int *paddressp)
|
||||
&& (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
|
||||
return expr;
|
||||
|
||||
while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
|
||||
while (UNARY_P (op)
|
||||
|| GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
|
||||
op = XEXP (op, 0);
|
||||
|
||||
@ -3330,7 +3330,7 @@ output_asm_insn (const char *templ, rtx *operands)
|
||||
}
|
||||
else if (letter == 'n')
|
||||
{
|
||||
if (GET_CODE (operands[opnum]) == CONST_INT)
|
||||
if (CONST_INT_P (operands[opnum]))
|
||||
fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
|
||||
- INTVAL (operands[opnum]));
|
||||
else
|
||||
@ -3562,7 +3562,7 @@ output_addr_const (FILE *file, rtx x)
|
||||
|
||||
case PLUS:
|
||||
/* Some assemblers need integer constants to appear last (eg masm). */
|
||||
if (GET_CODE (XEXP (x, 0)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (x, 0)))
|
||||
{
|
||||
output_addr_const (file, XEXP (x, 1));
|
||||
if (INTVAL (XEXP (x, 0)) >= 0)
|
||||
@ -3572,7 +3572,7 @@ output_addr_const (FILE *file, rtx x)
|
||||
else
|
||||
{
|
||||
output_addr_const (file, XEXP (x, 0));
|
||||
if (GET_CODE (XEXP (x, 1)) != CONST_INT
|
||||
if (!CONST_INT_P (XEXP (x, 1))
|
||||
|| INTVAL (XEXP (x, 1)) >= 0)
|
||||
fprintf (file, "+");
|
||||
output_addr_const (file, XEXP (x, 1));
|
||||
@ -3588,7 +3588,7 @@ output_addr_const (FILE *file, rtx x)
|
||||
|
||||
output_addr_const (file, XEXP (x, 0));
|
||||
fprintf (file, "-");
|
||||
if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
|
||||
if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
|
||||
|| GET_CODE (XEXP (x, 1)) == PC
|
||||
|| GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
|
||||
output_addr_const (file, XEXP (x, 1));
|
||||
@ -3796,7 +3796,7 @@ asm_fprintf (FILE *file, const char *p, ...)
|
||||
void
|
||||
split_double (rtx value, rtx *first, rtx *second)
|
||||
{
|
||||
if (GET_CODE (value) == CONST_INT)
|
||||
if (CONST_INT_P (value))
|
||||
{
|
||||
if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
|
||||
{
|
||||
|
@ -654,7 +654,7 @@ find_temp_slot_from_address (rtx x)
|
||||
/* Last resort: Address is a virtual stack var address. */
|
||||
if (GET_CODE (x) == PLUS
|
||||
&& XEXP (x, 0) == virtual_stack_vars_rtx
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
{
|
||||
int i;
|
||||
for (i = max_slot_level (); i >= 0; i--)
|
||||
@ -1457,7 +1457,7 @@ instantiate_virtual_regs_in_insn (rtx insn)
|
||||
&& recog_data.n_operands >= 3
|
||||
&& recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
|
||||
&& recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
|
||||
&& GET_CODE (recog_data.operand[2]) == CONST_INT
|
||||
&& CONST_INT_P (recog_data.operand[2])
|
||||
&& (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
|
||||
{
|
||||
offset += INTVAL (recog_data.operand[2]);
|
||||
@ -1783,7 +1783,7 @@ instantiate_virtual_regs (void)
|
||||
for_each_rtx (®_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
|
||||
|
||||
/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
|
||||
if (GET_CODE (insn) == CALL_INSN)
|
||||
if (CALL_P (insn))
|
||||
for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
|
||||
instantiate_virtual_regs_in_rtx, NULL);
|
||||
}
|
||||
@ -2458,7 +2458,7 @@ assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
|
||||
up with a guess at the alignment based on OFFSET_RTX. */
|
||||
if (data->locate.where_pad != downward || data->entry_parm)
|
||||
align = boundary;
|
||||
else if (GET_CODE (offset_rtx) == CONST_INT)
|
||||
else if (CONST_INT_P (offset_rtx))
|
||||
{
|
||||
align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
|
||||
align = align & -align;
|
||||
|
@ -254,7 +254,7 @@ canonicalize_address (rtx x)
|
||||
switch (GET_CODE (x))
|
||||
{
|
||||
case ASHIFT:
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x))
|
||||
&& INTVAL (XEXP (x, 1)) >= 0)
|
||||
{
|
||||
@ -574,7 +574,7 @@ propagate_rtx (rtx x, enum machine_mode mode, rtx old_rtx, rtx new_rtx,
|
||||
|
||||
/* gen_lowpart_common will not be able to process VOIDmode entities other
|
||||
than CONST_INTs. */
|
||||
if (GET_MODE (tem) == VOIDmode && GET_CODE (tem) != CONST_INT)
|
||||
if (GET_MODE (tem) == VOIDmode && !CONST_INT_P (tem))
|
||||
return NULL_RTX;
|
||||
|
||||
if (GET_MODE (tem) == VOIDmode)
|
||||
|
@ -1287,8 +1287,8 @@ gcse_constant_p (const_rtx x)
|
||||
{
|
||||
/* Consider a COMPARE of two integers constant. */
|
||||
if (GET_CODE (x) == COMPARE
|
||||
&& GET_CODE (XEXP (x, 0)) == CONST_INT
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 0))
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
return true;
|
||||
|
||||
/* Consider a COMPARE of the same registers is a constant
|
||||
|
@ -1115,7 +1115,7 @@ write_tm_constrs_h (void)
|
||||
puts (" const REAL_VALUE_TYPE *rval = 0;");
|
||||
|
||||
if (needs_ival)
|
||||
puts (" if (GET_CODE (op) == CONST_INT)\n"
|
||||
puts (" if (CONST_INT_P (op))\n"
|
||||
" ival = INTVAL (op);");
|
||||
if (needs_hval)
|
||||
puts (" if (GET_CODE (op) == CONST_DOUBLE && mode == VOIDmode)"
|
||||
|
@ -795,7 +795,7 @@ validate_pattern (rtx pattern, rtx insn, rtx set, int set_code)
|
||||
&& GET_CODE (dest) != CC0
|
||||
&& GET_CODE (src) != PC
|
||||
&& GET_CODE (src) != CC0
|
||||
&& GET_CODE (src) != CONST_INT
|
||||
&& !CONST_INT_P (src)
|
||||
&& GET_CODE (src) != CALL)
|
||||
{
|
||||
const char *which;
|
||||
|
28
gcc/ifcvt.c
28
gcc/ifcvt.c
@ -905,12 +905,12 @@ noce_try_store_flag (struct noce_if_info *if_info)
|
||||
int reversep;
|
||||
rtx target, seq;
|
||||
|
||||
if (GET_CODE (if_info->b) == CONST_INT
|
||||
if (CONST_INT_P (if_info->b)
|
||||
&& INTVAL (if_info->b) == STORE_FLAG_VALUE
|
||||
&& if_info->a == const0_rtx)
|
||||
reversep = 0;
|
||||
else if (if_info->b == const0_rtx
|
||||
&& GET_CODE (if_info->a) == CONST_INT
|
||||
&& CONST_INT_P (if_info->a)
|
||||
&& INTVAL (if_info->a) == STORE_FLAG_VALUE
|
||||
&& (reversed_comparison_code (if_info->cond, if_info->jump)
|
||||
!= UNKNOWN))
|
||||
@ -952,8 +952,8 @@ noce_try_store_flag_constants (struct noce_if_info *if_info)
|
||||
int normalize, can_reverse;
|
||||
enum machine_mode mode;
|
||||
|
||||
if (GET_CODE (if_info->a) == CONST_INT
|
||||
&& GET_CODE (if_info->b) == CONST_INT)
|
||||
if (CONST_INT_P (if_info->a)
|
||||
&& CONST_INT_P (if_info->b))
|
||||
{
|
||||
mode = GET_MODE (if_info->x);
|
||||
ifalse = INTVAL (if_info->a);
|
||||
@ -1538,7 +1538,7 @@ noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
|
||||
make equivalent types of changes) to get the constants we need
|
||||
if they're off by one in the right direction. */
|
||||
|
||||
if (GET_CODE (target) == CONST_INT)
|
||||
if (CONST_INT_P (target))
|
||||
{
|
||||
enum rtx_code code = GET_CODE (if_info->cond);
|
||||
rtx op_a = XEXP (if_info->cond, 0);
|
||||
@ -1555,14 +1555,14 @@ noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
|
||||
rtx src = find_reg_equal_equiv_note (prev_insn);
|
||||
if (!src)
|
||||
src = SET_SRC (PATTERN (prev_insn));
|
||||
if (GET_CODE (src) == CONST_INT)
|
||||
if (CONST_INT_P (src))
|
||||
{
|
||||
if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
|
||||
op_a = src;
|
||||
else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
|
||||
op_b = src;
|
||||
|
||||
if (GET_CODE (op_a) == CONST_INT)
|
||||
if (CONST_INT_P (op_a))
|
||||
{
|
||||
rtx tmp = op_a;
|
||||
op_a = op_b;
|
||||
@ -1574,7 +1574,7 @@ noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
|
||||
|
||||
/* Now, look to see if we can get the right constant by
|
||||
adjusting the conditional. */
|
||||
if (GET_CODE (op_b) == CONST_INT)
|
||||
if (CONST_INT_P (op_b))
|
||||
{
|
||||
HOST_WIDE_INT desired_val = INTVAL (target);
|
||||
HOST_WIDE_INT actual_val = INTVAL (op_b);
|
||||
@ -1973,7 +1973,7 @@ noce_try_bitop (struct noce_if_info *if_info)
|
||||
if (GET_CODE (cond) == ZERO_EXTRACT)
|
||||
{
|
||||
if (XEXP (cond, 1) != const1_rtx
|
||||
|| GET_CODE (XEXP (cond, 2)) != CONST_INT
|
||||
|| !CONST_INT_P (XEXP (cond, 2))
|
||||
|| ! rtx_equal_p (x, XEXP (cond, 0)))
|
||||
return FALSE;
|
||||
bitnum = INTVAL (XEXP (cond, 2));
|
||||
@ -1991,7 +1991,7 @@ noce_try_bitop (struct noce_if_info *if_info)
|
||||
{
|
||||
/* Check for "if (X & C) x = x op C". */
|
||||
if (! rtx_equal_p (x, XEXP (a, 0))
|
||||
|| GET_CODE (XEXP (a, 1)) != CONST_INT
|
||||
|| !CONST_INT_P (XEXP (a, 1))
|
||||
|| (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
|
||||
!= (unsigned HOST_WIDE_INT) 1 << bitnum)
|
||||
return FALSE;
|
||||
@ -2017,7 +2017,7 @@ noce_try_bitop (struct noce_if_info *if_info)
|
||||
{
|
||||
/* Check for "if (X & C) x &= ~C". */
|
||||
if (! rtx_equal_p (x, XEXP (a, 0))
|
||||
|| GET_CODE (XEXP (a, 1)) != CONST_INT
|
||||
|| !CONST_INT_P (XEXP (a, 1))
|
||||
|| (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
|
||||
!= (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
|
||||
return FALSE;
|
||||
@ -2142,7 +2142,7 @@ noce_mem_write_may_trap_or_fault_p (const_rtx mem)
|
||||
addr = XEXP (addr, 1);
|
||||
break;
|
||||
case PLUS:
|
||||
if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (addr, 1)))
|
||||
addr = XEXP (addr, 0);
|
||||
else
|
||||
return false;
|
||||
@ -2298,8 +2298,8 @@ noce_process_if_block (struct noce_if_info *if_info)
|
||||
return FALSE;
|
||||
|
||||
if (GET_CODE (x) == ZERO_EXTRACT
|
||||
&& (GET_CODE (XEXP (x, 1)) != CONST_INT
|
||||
|| GET_CODE (XEXP (x, 2)) != CONST_INT))
|
||||
&& (!CONST_INT_P (XEXP (x, 1))
|
||||
|| !CONST_INT_P (XEXP (x, 2))))
|
||||
return FALSE;
|
||||
|
||||
x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
|
||||
|
@@ -429,7 +429,7 @@ record_reg_classes (int n_alts, int n_ops, rtx *ops,
break;

case 's':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE (op) == VOIDmode))
break;
@@ -441,7 +441,7 @@ record_reg_classes (int n_alts, int n_ops, rtx *ops,
break;

case 'n':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE (op) == VOIDmode))
win = 1;
@@ -455,7 +455,7 @@ record_reg_classes (int n_alts, int n_ops, rtx *ops,
case 'N':
case 'O':
case 'P':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
win = 1;
break;
@@ -630,21 +630,21 @@ single_reg_class (const char *constraints, rtx op, rtx equiv_const)
break;

case 'n':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
|| (equiv_const != NULL_RTX
- && (GET_CODE (equiv_const) == CONST_INT
+ && (CONST_INT_P (equiv_const)
|| (GET_CODE (equiv_const) == CONST_DOUBLE
&& GET_MODE (equiv_const) == VOIDmode))))
return NO_REGS;
break;

case 's':
- if ((CONSTANT_P (op) && GET_CODE (op) != CONST_INT
+ if ((CONSTANT_P (op) && !CONST_INT_P (op)
&& (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
|| (equiv_const != NULL_RTX
&& CONSTANT_P (equiv_const)
- && GET_CODE (equiv_const) != CONST_INT
+ && !CONST_INT_P (equiv_const)
&& (GET_CODE (equiv_const) != CONST_DOUBLE
|| GET_MODE (equiv_const) != VOIDmode)))
return NO_REGS;
@@ -658,10 +658,10 @@ single_reg_class (const char *constraints, rtx op, rtx equiv_const)
case 'N':
case 'O':
case 'P':
- if ((GET_CODE (op) == CONST_INT
+ if ((CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
|| (equiv_const != NULL_RTX
- && GET_CODE (equiv_const) == CONST_INT
+ && CONST_INT_P (equiv_const)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
c, constraints)))
return NO_REGS;
gcc/jump.c
@@ -391,7 +391,7 @@ reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,

/* Test for an integer condition, or a floating-point comparison
in which NaNs can be ignored. */
- if (GET_CODE (arg0) == CONST_INT
+ if (CONST_INT_P (arg0)
|| (GET_MODE (arg0) != VOIDmode
&& GET_MODE_CLASS (mode) != MODE_CC
&& !HONOR_NANS (mode)))
@@ -1205,9 +1205,7 @@ delete_related_insns (rtx insn)

/* Likewise if we're deleting a dispatch table. */

- if (JUMP_P (insn)
- && (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+ if (JUMP_TABLE_DATA_P (insn))
{
rtx pat = PATTERN (insn);
int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
@@ -1241,9 +1239,7 @@ delete_related_insns (rtx insn)

if (was_code_label
&& NEXT_INSN (insn) != 0
- && JUMP_P (NEXT_INSN (insn))
- && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
- || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
+ && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
next = delete_related_insns (NEXT_INSN (insn));

/* If INSN was a label, delete insns following it if now unreachable. */
@@ -1567,11 +1567,11 @@ implies_p (rtx a, rtx b)

/* A != N is equivalent to A - (N + 1) <u -1. */
if (GET_CODE (a) == NE
- && GET_CODE (op1) == CONST_INT
+ && CONST_INT_P (op1)
&& GET_CODE (b) == LTU
&& opb1 == constm1_rtx
&& GET_CODE (opb0) == PLUS
- && GET_CODE (XEXP (opb0, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (opb0, 1))
/* Avoid overflows. */
&& ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
!= ((unsigned HOST_WIDE_INT)1
@@ -1581,12 +1581,12 @@ implies_p (rtx a, rtx b)

/* Likewise, A != N implies A - N > 0. */
if (GET_CODE (a) == NE
- && GET_CODE (op1) == CONST_INT)
+ && CONST_INT_P (op1))
{
if (GET_CODE (b) == GTU
&& GET_CODE (opb0) == PLUS
&& opb1 == const0_rtx
- && GET_CODE (XEXP (opb0, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (opb0, 1))
/* Avoid overflows. */
&& ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
!= ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
@@ -1595,7 +1595,7 @@ implies_p (rtx a, rtx b)
if (GET_CODE (b) == GEU
&& GET_CODE (opb0) == PLUS
&& opb1 == const1_rtx
- && GET_CODE (XEXP (opb0, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (opb0, 1))
/* Avoid overflows. */
&& ((unsigned HOST_WIDE_INT) INTVAL (XEXP (opb0, 1))
!= ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
@@ -1605,11 +1605,11 @@ implies_p (rtx a, rtx b)

/* A >s X, where X is positive, implies A <u Y, if Y is negative. */
if ((GET_CODE (a) == GT || GET_CODE (a) == GE)
- && GET_CODE (op1) == CONST_INT
+ && CONST_INT_P (op1)
&& ((GET_CODE (a) == GT && op1 == constm1_rtx)
|| INTVAL (op1) >= 0)
&& GET_CODE (b) == LTU
- && GET_CODE (opb1) == CONST_INT
+ && CONST_INT_P (opb1)
&& rtx_equal_p (op0, opb0))
return INTVAL (opb1) < 0;

@@ -1648,7 +1648,7 @@ canon_condition (rtx cond)
mode = GET_MODE (op1);
gcc_assert (mode != VOIDmode);

- if (GET_CODE (op1) == CONST_INT
+ if (CONST_INT_P (op1)
&& GET_MODE_CLASS (mode) != MODE_CC
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
{
@@ -2202,7 +2202,7 @@ determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)
unsigned HOST_WIDEST_INT nmax, inc;

if (GET_CODE (niter) == AND
- && GET_CODE (XEXP (niter, 0)) == CONST_INT)
+ && CONST_INT_P (XEXP (niter, 0)))
{
nmax = INTVAL (XEXP (niter, 0));
if (!(nmax & (nmax + 1)))
@@ -2217,7 +2217,7 @@ determine_max_iter (struct loop *loop, struct niter_desc *desc, rtx old_niter)

if (GET_CODE (niter) == UDIV)
{
- if (GET_CODE (XEXP (niter, 1)) != CONST_INT)
+ if (!CONST_INT_P (XEXP (niter, 1)))
{
desc->niter_max = nmax;
return nmax;
@@ -2345,7 +2345,7 @@ iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
mode_mmax = lowpart_subreg (mode, mmax, comp_mode);

- if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
+ if (!CONST_INT_P (iv0.step) || !CONST_INT_P (iv1.step))
goto fail;

/* We can take care of the case of two induction variables chasing each other
@@ -2476,7 +2476,7 @@ iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
may_xform = const0_rtx;
may_not_xform = const_true_rtx;

- if (GET_CODE (delta) == CONST_INT)
+ if (CONST_INT_P (delta))
{
if (was_sharp && INTVAL (delta) == INTVAL (step) - 1)
{
@@ -2539,11 +2539,11 @@ iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
number of iterations in this step, so record the information
here. */
inc = INTVAL (iv0.step) - INTVAL (iv1.step);
- if (GET_CODE (iv1.base) == CONST_INT)
+ if (CONST_INT_P (iv1.base))
up = INTVAL (iv1.base);
else
up = INTVAL (mode_mmax) - inc;
- down = INTVAL (GET_CODE (iv0.base) == CONST_INT
+ down = INTVAL (CONST_INT_P (iv0.base)
? iv0.base
: mode_mmin);
desc->niter_max = (up - down) / inc + 1;
@@ -2752,7 +2752,7 @@ iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
&& XEXP (desc->noloop_assumptions, 0) == const_true_rtx)
goto zero_iter;

- if (GET_CODE (desc->niter_expr) == CONST_INT)
+ if (CONST_INT_P (desc->niter_expr))
{
unsigned HOST_WIDEST_INT val = INTVAL (desc->niter_expr);
@@ -922,7 +922,7 @@ find_decomposable_shift_zext (rtx insn)
}
else /* left or right shift */
{
- if (GET_CODE (XEXP (op, 1)) != CONST_INT
+ if (!CONST_INT_P (XEXP (op, 1))
|| INTVAL (XEXP (op, 1)) < BITS_PER_WORD
|| GET_MODE_BITSIZE (GET_MODE (op_operand)) != 2 * BITS_PER_WORD)
return 0;
@@ -353,7 +353,7 @@ const_iteration_count (rtx count_reg, basic_block pre_header,
{
rtx pat = single_set (insn);

- if (GET_CODE (SET_SRC (pat)) == CONST_INT)
+ if (CONST_INT_P (SET_SRC (pat)))
{
*count = INTVAL (SET_SRC (pat));
return insn;
gcc/optabs.c
@@ -1093,7 +1093,7 @@ expand_doubleword_shift (enum machine_mode op1_mode, optab binoptab,
/* If we can compute the condition at compile time, pick the
appropriate subroutine. */
tmp = simplify_relational_operation (cmp_code, SImode, op1_mode, cmp1, cmp2);
- if (tmp != 0 && GET_CODE (tmp) == CONST_INT)
+ if (tmp != 0 && CONST_INT_P (tmp))
{
if (tmp == const0_rtx)
return expand_superword_shift (binoptab, outof_input, superword_op1,
@@ -1395,7 +1395,7 @@ avoid_expensive_constant (enum machine_mode mode, optab binoptab,
&& rtx_cost (x, binoptab->code, optimize_insn_for_speed_p ())
> COSTS_N_INSNS (1))
{
- if (GET_CODE (x) == CONST_INT)
+ if (CONST_INT_P (x))
{
HOST_WIDE_INT intval = trunc_int_for_mode (INTVAL (x), mode);
if (intval != INTVAL (x))
@@ -1562,7 +1562,7 @@ expand_binop (enum machine_mode mode, optab binoptab, rtx op0, rtx op1,
/* If subtracting an integer constant, convert this into an addition of
the negated constant. */

- if (binoptab == sub_optab && GET_CODE (op1) == CONST_INT)
+ if (binoptab == sub_optab && CONST_INT_P (op1))
{
op1 = negate_rtx (mode, op1);
binoptab = add_optab;
@@ -1594,7 +1594,7 @@ expand_binop (enum machine_mode mode, optab binoptab, rtx op0, rtx op1,
rtx newop1;
unsigned int bits = GET_MODE_BITSIZE (mode);

- if (GET_CODE (op1) == CONST_INT)
+ if (CONST_INT_P (op1))
newop1 = GEN_INT (bits - INTVAL (op1));
else if (targetm.shift_truncation_mask (mode) == bits - 1)
newop1 = negate_rtx (mode, op1);
@@ -1765,7 +1765,7 @@ expand_binop (enum machine_mode mode, optab binoptab, rtx op0, rtx op1,
if ((binoptab == lshr_optab || binoptab == ashl_optab
|| binoptab == ashr_optab)
&& mclass == MODE_INT
- && (GET_CODE (op1) == CONST_INT || optimize_insn_for_speed_p ())
+ && (CONST_INT_P (op1) || optimize_insn_for_speed_p ())
&& GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
&& optab_handler (binoptab, word_mode)->insn_code != CODE_FOR_nothing
&& optab_handler (ashl_optab, word_mode)->insn_code != CODE_FOR_nothing
@@ -1779,7 +1779,7 @@ expand_binop (enum machine_mode mode, optab binoptab, rtx op0, rtx op1,
op1_mode = GET_MODE (op1) != VOIDmode ? GET_MODE (op1) : word_mode;

/* Apply the truncation to constant shifts. */
- if (double_shift_mask > 0 && GET_CODE (op1) == CONST_INT)
+ if (double_shift_mask > 0 && CONST_INT_P (op1))
op1 = GEN_INT (INTVAL (op1) & double_shift_mask);

if (op1 == CONST0_RTX (op1_mode))
@@ -1835,7 +1835,7 @@ expand_binop (enum machine_mode mode, optab binoptab, rtx op0, rtx op1,
/* Synthesize double word rotates from single word shifts. */
if ((binoptab == rotl_optab || binoptab == rotr_optab)
&& mclass == MODE_INT
- && GET_CODE (op1) == CONST_INT
+ && CONST_INT_P (op1)
&& GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
&& optab_handler (ashl_optab, word_mode)->insn_code != CODE_FOR_nothing
&& optab_handler (lshr_optab, word_mode)->insn_code != CODE_FOR_nothing)
@@ -4068,7 +4068,7 @@ prepare_cmp_insn (rtx x, rtx y, enum rtx_code comparison, rtx size,
continue;

/* Must make sure the size fits the insn's mode. */
- if ((GET_CODE (size) == CONST_INT
+ if ((CONST_INT_P (size)
&& INTVAL (size) >= (1 << GET_MODE_BITSIZE (cmp_mode)))
|| (GET_MODE_BITSIZE (GET_MODE (size))
> GET_MODE_BITSIZE (cmp_mode)))
@@ -284,7 +284,7 @@ reload_cse_simplify_set (rtx set, rtx insn)

/* ??? I'm lazy and don't wish to handle CONST_DOUBLE. Other
constants, such as SYMBOL_REF, cannot be extended. */
- if (GET_CODE (this_rtx) != CONST_INT)
+ if (!CONST_INT_P (this_rtx))
continue;

this_val = INTVAL (this_rtx);
@@ -570,7 +570,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
a cheap CONST_INT. */
if (op_alt_regno[i][j] == -1
&& reg_fits_class_p (testreg, rclass, 0, mode)
- && (GET_CODE (recog_data.operand[i]) != CONST_INT
+ && (!CONST_INT_P (recog_data.operand[i])
|| (rtx_cost (recog_data.operand[i], SET,
optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
> rtx_cost (testreg, SET,
@@ -860,7 +860,7 @@ reload_combine (void)
(REGY), i.e. BASE, is not clobbered before the last use we'll
create. */
if (prev_set != 0
- && GET_CODE (SET_SRC (prev_set)) == CONST_INT
+ && CONST_INT_P (SET_SRC (prev_set))
&& rtx_equal_p (SET_DEST (prev_set), reg)
&& reg_state[regno].use_index >= 0
&& (reg_state[REGNO (base)].store_ruid
@@ -1074,7 +1074,7 @@ reload_combine_note_use (rtx *xp, rtx insn)
case PLUS:
/* We are interested in (plus (reg) (const_int)) . */
if (!REG_P (XEXP (x, 0))
- || GET_CODE (XEXP (x, 1)) != CONST_INT)
+ || !CONST_INT_P (XEXP (x, 1)))
break;
offset = XEXP (x, 1);
x = XEXP (x, 0);
@@ -1239,7 +1239,7 @@ reload_cse_move2add (rtx first)
(set (STRICT_LOW_PART (REGX)) (CONST_INT B))
*/

- if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
+ if (CONST_INT_P (src) && reg_base_reg[regno] < 0)
{
rtx new_src = gen_int_mode (INTVAL (src) - reg_offset[regno],
GET_MODE (reg));
@@ -1325,7 +1325,7 @@ reload_cse_move2add (rtx first)
&& SET_DEST (set) == reg
&& GET_CODE (SET_SRC (set)) == PLUS
&& XEXP (SET_SRC (set), 0) == reg
- && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (SET_SRC (set), 1)))
{
rtx src3 = XEXP (SET_SRC (set), 1);
HOST_WIDE_INT added_offset = INTVAL (src3);
@@ -1398,7 +1398,7 @@ reload_cse_move2add (rtx first)
allocation if possible. */
&& SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
&& hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
- && GET_CODE (XEXP (cnd, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (cnd, 1)))
{
rtx implicit_set =
gen_rtx_SET (VOIDmode, XEXP (cnd, 0), XEXP (cnd, 1));
@@ -1479,7 +1479,7 @@ move2add_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
base_reg = XEXP (src, 0);

- if (GET_CODE (XEXP (src, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (src, 1)))
offset = INTVAL (XEXP (src, 1));
else if (REG_P (XEXP (src, 1))
&& (reg_set_luid[REGNO (XEXP (src, 1))]
@@ -175,7 +175,7 @@ print_rtx (const_rtx in_rtx)
else
{
/* Print name of expression code. */
- if (flag_simple && GET_CODE (in_rtx) == CONST_INT)
+ if (flag_simple && CONST_INT_P (in_rtx))
fputc ('(', outfile);
else
fprintf (outfile, "(%s", GET_RTX_NAME (GET_CODE (in_rtx)));
gcc/recog.c
@@ -550,13 +550,13 @@ simplify_while_replacing (rtx *loc, rtx to, rtx object,
simplify_gen_binary to try to simplify it.
??? We may want later to remove this, once simplification is
separated from this function. */
- if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
+ if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
validate_change (object, loc,
simplify_gen_binary
(PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
break;
case MINUS:
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
|| GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
validate_change (object, loc,
simplify_gen_binary
@@ -597,8 +597,8 @@ simplify_while_replacing (rtx *loc, rtx to, rtx object,
happen, we might just fail in some cases). */

if (MEM_P (XEXP (x, 0))
- && GET_CODE (XEXP (x, 1)) == CONST_INT
- && GET_CODE (XEXP (x, 2)) == CONST_INT
+ && CONST_INT_P (XEXP (x, 1))
+ && CONST_INT_P (XEXP (x, 2))
&& !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
&& !MEM_VOLATILE_P (XEXP (x, 0)))
{
@@ -901,7 +901,7 @@ general_operand (rtx op, enum machine_mode mode)
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
return 0;

- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& mode != VOIDmode
&& trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
return 0;
@@ -1078,7 +1078,7 @@ immediate_operand (rtx op, enum machine_mode mode)
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
return 0;

- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& mode != VOIDmode
&& trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
return 0;
@@ -1095,7 +1095,7 @@ immediate_operand (rtx op, enum machine_mode mode)
int
const_int_operand (rtx op, enum machine_mode mode)
{
- if (GET_CODE (op) != CONST_INT)
+ if (!CONST_INT_P (op))
return 0;

if (mode != VOIDmode
@@ -1118,7 +1118,7 @@ const_double_operand (rtx op, enum machine_mode mode)
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
return 0;

- return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
+ return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
&& (mode == VOIDmode || GET_MODE (op) == mode
|| GET_MODE (op) == VOIDmode));
}
@@ -1145,7 +1145,7 @@ nonmemory_operand (rtx op, enum machine_mode mode)
&& GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
return 0;

- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& mode != VOIDmode
&& trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
return 0;
@@ -1212,7 +1212,7 @@ push_operand (rtx op, enum machine_mode mode)
if (GET_CODE (op) != PRE_MODIFY
|| GET_CODE (XEXP (op, 1)) != PLUS
|| XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
- || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
+ || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
|| INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
@@ -1313,7 +1313,7 @@ indirect_operand (rtx op, enum machine_mode mode)

return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
|| (GET_CODE (XEXP (inner, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
&& INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
&& general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
}
@@ -1696,7 +1696,7 @@ asm_operand_ok (rtx op, const char *constraint, const char **constraints)
break;

case 's':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE (op) == VOIDmode))
break;
@@ -1708,49 +1708,49 @@ asm_operand_ok (rtx op, const char *constraint, const char **constraints)
break;

case 'n':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE (op) == VOIDmode))
result = 1;
break;

case 'I':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
result = 1;
break;
case 'J':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
result = 1;
break;
case 'K':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
result = 1;
break;
case 'L':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
result = 1;
break;
case 'M':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
result = 1;
break;
case 'N':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
result = 1;
break;
case 'O':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
result = 1;
break;
case 'P':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
result = 1;
break;
@@ -2536,7 +2536,7 @@ constrain_operands (int strict)
break;

case 's':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE (op) == VOIDmode))
break;
@@ -2546,7 +2546,7 @@ constrain_operands (int strict)
break;

case 'n':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
|| (GET_CODE (op) == CONST_DOUBLE
&& GET_MODE (op) == VOIDmode))
win = 1;
@@ -2560,7 +2560,7 @@ constrain_operands (int strict)
case 'N':
case 'O':
case 'P':
- if (GET_CODE (op) == CONST_INT
+ if (CONST_INT_P (op)
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
win = 1;
break;
@@ -1126,7 +1126,7 @@ reg_scan_mark_refs (rtx x, rtx insn)
&& REG_POINTER (SET_SRC (x)))
|| ((GET_CODE (SET_SRC (x)) == PLUS
|| GET_CODE (SET_SRC (x)) == LO_SUM)
- && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (SET_SRC (x), 1))
&& REG_P (XEXP (SET_SRC (x), 0))
&& REG_POINTER (XEXP (SET_SRC (x), 0)))
|| GET_CODE (SET_SRC (x)) == CONST
@@ -105,7 +105,7 @@ find_use_as_address (rtx x, rtx reg, HOST_WIDE_INT plusconst)

if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
&& XEXP (XEXP (x, 0), 0) == reg
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1))
&& INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
return x;

@@ -774,7 +774,7 @@ fixup_match_2 (rtx insn, rtx dst, rtx src, rtx offset)
if (pset && SET_DEST (pset) == dst
&& GET_CODE (SET_SRC (pset)) == PLUS
&& XEXP (SET_SRC (pset), 0) == src
- && GET_CODE (XEXP (SET_SRC (pset), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (SET_SRC (pset), 1)))
{
HOST_WIDE_INT newconst
= INTVAL (offset) - INTVAL (XEXP (SET_SRC (pset), 1));
@@ -1015,7 +1015,7 @@ regmove_backward_pass (void)
if (REGNO (src) < FIRST_PSEUDO_REGISTER)
{
if (GET_CODE (SET_SRC (set)) == PLUS
- && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (SET_SRC (set), 1))
&& XEXP (SET_SRC (set), 0) == src
&& fixup_match_2 (insn, dst, src,
XEXP (SET_SRC (set), 1)))
gcc/reload.c
@ -2393,12 +2393,12 @@ decompose (rtx x)
|
||||
offset = XEXP (offset, 0);
|
||||
if (GET_CODE (offset) == PLUS)
|
||||
{
|
||||
if (GET_CODE (XEXP (offset, 0)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (offset, 0)))
|
||||
{
|
||||
base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
|
||||
offset = XEXP (offset, 0);
|
||||
}
|
||||
else if (GET_CODE (XEXP (offset, 1)) == CONST_INT)
|
||||
else if (CONST_INT_P (XEXP (offset, 1)))
|
||||
{
|
||||
base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
|
||||
offset = XEXP (offset, 1);
|
||||
@ -2409,7 +2409,7 @@ decompose (rtx x)
|
||||
offset = const0_rtx;
|
||||
}
|
||||
}
|
||||
else if (GET_CODE (offset) != CONST_INT)
|
||||
else if (!CONST_INT_P (offset))
|
||||
{
|
||||
base = gen_rtx_PLUS (GET_MODE (base), base, offset);
|
||||
offset = const0_rtx;
|
||||
@ -2418,7 +2418,7 @@ decompose (rtx x)
|
||||
if (all_const && GET_CODE (base) == PLUS)
|
||||
base = gen_rtx_CONST (GET_MODE (base), base);
|
||||
|
||||
gcc_assert (GET_CODE (offset) == CONST_INT);
|
||||
gcc_assert (CONST_INT_P (offset));
|
||||
|
||||
val.start = INTVAL (offset);
|
||||
val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
|
||||
@ -3322,7 +3322,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
|
||||
break;
|
||||
|
||||
case 's':
|
||||
if (GET_CODE (operand) == CONST_INT
|
||||
if (CONST_INT_P (operand)
|
||||
|| (GET_CODE (operand) == CONST_DOUBLE
|
||||
&& GET_MODE (operand) == VOIDmode))
|
||||
break;
|
||||
@ -3333,7 +3333,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
|
||||
break;
|
||||
|
||||
case 'n':
|
||||
if (GET_CODE (operand) == CONST_INT
|
||||
if (CONST_INT_P (operand)
|
||||
|| (GET_CODE (operand) == CONST_DOUBLE
|
||||
&& GET_MODE (operand) == VOIDmode))
|
||||
win = 1;
|
||||
@ -3347,7 +3347,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
|
||||
case 'N':
|
||||
case 'O':
|
||||
case 'P':
|
||||
if (GET_CODE (operand) == CONST_INT
|
||||
if (CONST_INT_P (operand)
|
||||
&& CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
|
||||
win = 1;
|
||||
break;
|
||||
@ -4233,7 +4233,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
|
||||
/* If we detected error and replaced asm instruction by USE, forget about the
|
||||
reloads. */
|
||||
if (GET_CODE (PATTERN (insn)) == USE
|
||||
&& GET_CODE (XEXP (PATTERN (insn), 0)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (PATTERN (insn), 0)))
|
||||
n_reloads = 0;
|
||||
|
||||
/* Perhaps an output reload can be combined with another
|
||||
@ -4934,7 +4934,7 @@ find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
|
||||
|
||||
/* But first quickly dispose of a common case. */
|
||||
if (GET_CODE (ad) == PLUS
|
||||
&& GET_CODE (XEXP (ad, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (ad, 1))
|
||||
&& REG_P (XEXP (ad, 0))
|
||||
&& reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
|
||||
return 0;
|
||||
@ -5014,7 +5014,7 @@ find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
|
||||
|| ! (REG_P (XEXP (tem, 0))
|
||||
|| (GET_CODE (XEXP (tem, 0)) == PLUS
|
||||
&& REG_P (XEXP (XEXP (tem, 0), 0))
|
||||
&& GET_CODE (XEXP (XEXP (tem, 0), 1)) == CONST_INT)))
|
||||
&& CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
|
||||
{
|
||||
/* Must use TEM here, not AD, since it is the one that will
|
||||
have any subexpressions reloaded, if needed. */
|
||||
@ -5036,7 +5036,7 @@ find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
|
||||
else if (GET_CODE (ad) == PLUS
|
||||
&& REG_P (XEXP (ad, 0))
|
||||
&& REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
|
||||
&& GET_CODE (XEXP (ad, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (ad, 1))
|
||||
&& regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
|
||||
CONST_INT))
|
||||
|
||||
@ -5110,7 +5110,7 @@ find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
|
||||
|
||||
inner_code = GET_CODE (XEXP (ad, 0));
|
||||
if (!(GET_CODE (ad) == PLUS
|
||||
&& GET_CODE (XEXP (ad, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (ad, 1))
|
||||
&& (inner_code == PLUS || inner_code == LO_SUM)))
|
||||
continue;
|
||||
|
||||
@ -5266,7 +5266,7 @@ subst_reg_equivs (rtx ad, rtx insn)
|
||||
case PLUS:
|
||||
/* Quickly dispose of a common case. */
|
||||
if (XEXP (ad, 0) == frame_pointer_rtx
|
||||
&& GET_CODE (XEXP (ad, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (ad, 1)))
|
||||
return ad;
|
||||
break;
|
||||
|
||||
@ -5300,9 +5300,9 @@ form_sum (rtx x, rtx y)
|
||||
if (mode == VOIDmode)
|
||||
mode = Pmode;
|
||||
|
||||
if (GET_CODE (x) == CONST_INT)
|
||||
if (CONST_INT_P (x))
|
||||
return plus_constant (y, INTVAL (x));
|
||||
else if (GET_CODE (y) == CONST_INT)
|
||||
else if (CONST_INT_P (y))
|
||||
return plus_constant (x, INTVAL (y));
|
||||
else if (CONSTANT_P (x))
|
||||
tem = x, x = y, y = tem;
|
||||
@ -6111,7 +6111,7 @@ find_reloads_subreg_address (rtx x, int force_replace, int opnum,
|
||||
base = XEXP (tem, 0);
|
||||
if (GET_CODE (base) == PLUS)
|
||||
{
|
||||
if (GET_CODE (XEXP (base, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (base, 1))
|
||||
&& INTVAL (XEXP (base, 1)) % outer_size != 0)
|
||||
return x;
|
||||
base = XEXP (base, 0);
|
||||
@ -6547,7 +6547,7 @@ reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
|
||||
/* If either argument is a constant, then modifying X can not affect IN. */
|
||||
if (CONSTANT_P (x) || CONSTANT_P (in))
|
||||
return 0;
|
||||
else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
|
||||
else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
|
||||
return refers_to_mem_for_reload_p (in);
|
||||
else if (GET_CODE (x) == SUBREG)
|
||||
{
|
||||
@ -6781,7 +6781,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
|
||||
|| (REG_P (SET_DEST (pat))
|
||||
&& GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
|
||||
&& SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
|
||||
&& GET_CODE (goal) == CONST_INT
|
||||
&& CONST_INT_P (goal)
|
||||
&& 0 != (goaltry
|
||||
= operand_subword (XEXP (tem, 0), 0, 0,
|
||||
VOIDmode))
|
||||
@ -6795,7 +6795,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
|
||||
&& REG_P (SET_DEST (pat))
|
||||
&& GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
|
||||
&& SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
|
||||
&& GET_CODE (goal) == CONST_INT
|
||||
&& CONST_INT_P (goal)
|
||||
&& 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
|
||||
VOIDmode))
|
||||
&& rtx_equal_p (goal, goaltry)
|
||||
@ -7123,7 +7123,7 @@ find_inc_amount (rtx x, rtx inced)
|
||||
&& GET_CODE (XEXP (addr, 1)) == PLUS
|
||||
&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
|
||||
&& XEXP (addr, 0) == inced
|
||||
&& GET_CODE (XEXP (XEXP (addr, 1), 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
|
||||
{
|
||||
i = INTVAL (XEXP (XEXP (addr, 1), 1));
|
||||
return i < 0 ? -i : i;
|
||||
|
@ -2566,7 +2566,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
|
||||
We special-case the commonest situation in
|
||||
eliminate_regs_in_insn, so just replace a PLUS with a
|
||||
PLUS here, unless inside a MEM. */
|
||||
if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
|
||||
&& INTVAL (XEXP (x, 1)) == - ep->previous_offset)
|
||||
return ep->to_rtx;
|
||||
else
|
||||
@ -2632,7 +2632,7 @@ eliminate_regs_1 (rtx x, enum machine_mode mem_mode, rtx insn,
|
||||
We ignore the possibility of overflow here. */
|
||||
if (REG_P (XEXP (x, 0))
|
||||
&& REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
|
||||
ep++)
|
||||
if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
|
||||
@ -3044,7 +3044,7 @@ elimination_effects (rtx x, enum machine_mode mem_mode)
|
||||
|
||||
if (GET_CODE (src) == PLUS
|
||||
&& XEXP (src, 0) == SET_DEST (x)
|
||||
&& GET_CODE (XEXP (src, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (src, 1)))
|
||||
ep->offset -= INTVAL (XEXP (src, 1));
|
||||
else
|
||||
ep->can_eliminate = 0;
|
||||
@ -3179,7 +3179,7 @@ eliminate_regs_in_insn (rtx insn, int replace)
|
||||
rtx prev_insn, prev_set;
|
||||
|
||||
if (GET_CODE (base) == PLUS
|
||||
&& GET_CODE (XEXP (base, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (base, 1)))
|
||||
{
|
||||
offset += INTVAL (XEXP (base, 1));
|
||||
base = XEXP (base, 0);
|
||||
@ -3254,7 +3254,7 @@ eliminate_regs_in_insn (rtx insn, int replace)
|
||||
plus_src = SET_SRC (old_set);
|
||||
/* First see if the source is of the form (plus (...) CST). */
|
||||
if (plus_src
|
||||
&& GET_CODE (XEXP (plus_src, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (plus_src, 1)))
|
||||
plus_cst_src = plus_src;
|
||||
else if (REG_P (SET_SRC (old_set))
|
||||
|| plus_src)
|
||||
@ -3267,7 +3267,7 @@ eliminate_regs_in_insn (rtx insn, int replace)
|
||||
if ((REG_NOTE_KIND (links) == REG_EQUAL
|
||||
|| REG_NOTE_KIND (links) == REG_EQUIV)
|
||||
&& GET_CODE (XEXP (links, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (XEXP (links, 0), 1)))
|
||||
{
|
||||
plus_cst_src = XEXP (links, 0);
|
||||
break;
|
||||
@ -3579,7 +3579,7 @@ mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
|
||||
&& (GET_CODE (x) != SET
|
||||
|| GET_CODE (SET_SRC (x)) != PLUS
|
||||
|| XEXP (SET_SRC (x), 0) != dest
|
||||
|| GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
|
||||
|| !CONST_INT_P (XEXP (SET_SRC (x), 1))))
|
||||
{
|
||||
reg_eliminate[i].can_eliminate_previous
|
||||
= reg_eliminate[i].can_eliminate = 0;
|
||||
@ -8577,8 +8577,8 @@ delete_address_reloads (rtx dead_insn, rtx current_insn)
|
||||
set2 = single_set (prev);
|
||||
if (! set || ! set2
|
||||
|| GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
|
||||
|| GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
|
||||
|| GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
|
||||
|| !CONST_INT_P (XEXP (SET_SRC (set), 1))
|
||||
|| !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
|
||||
return;
|
||||
dst = SET_DEST (set);
|
||||
if (! rtx_equal_p (dst, SET_DEST (set2))
|
||||
@ -8803,7 +8803,7 @@ inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
|
||||
|
||||
emit_insn (gen_add2_insn (reloadreg, inc));
|
||||
store = emit_insn (gen_move_insn (incloc, reloadreg));
|
||||
if (GET_CODE (inc) == CONST_INT)
|
||||
if (CONST_INT_P (inc))
|
||||
emit_insn (gen_add2_insn (reloadreg, GEN_INT (-INTVAL (inc))));
|
||||
else
|
||||
emit_insn (gen_sub2_insn (reloadreg, inc));
|
||||
|
@@ -2931,7 +2931,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
/* If this is a constant adjustment, use the same code with
the negated constant. Otherwise, reverse the sense of the
arithmetic. */
- if (GET_CODE (other) == CONST_INT)
+ if (CONST_INT_P (other))
new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
negate_rtx (GET_MODE (src), other));
else
@@ -3856,9 +3856,7 @@ dbr_schedule (rtx first)
INSN_FROM_TARGET_P (insn) = 0;

/* Skip vector tables. We can't get attributes for them. */
- if (JUMP_P (insn)
- && (GET_CODE (PATTERN (insn)) == ADDR_VEC
- || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+ if (JUMP_TABLE_DATA_P (insn))
continue;

if (num_delay_slots (insn) > 0)
@@ -211,7 +211,7 @@ shared_const_p (const_rtx orig)
a LABEL_REF, it isn't sharable. */
return (GET_CODE (XEXP (orig, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
- && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT);
+ && CONST_INT_P (XEXP (XEXP (orig, 0), 1)));
}

@ -341,7 +341,7 @@ rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
|
||||
/* - or it is an address that can't trap plus a constant integer,
|
||||
with the proper remainder modulo the mode size if we are
|
||||
considering unaligned memory references. */
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
|
||||
size, mode, unaligned_mems))
|
||||
return 0;
|
||||
@ -408,7 +408,7 @@ nonzero_address_p (const_rtx x)
|
||||
return nonzero_address_p (XEXP (x, 0));
|
||||
|
||||
case PLUS:
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (x, 1)))
|
||||
return nonzero_address_p (XEXP (x, 0));
|
||||
/* Handle PIC references. */
|
||||
else if (XEXP (x, 0) == pic_offset_table_rtx
|
||||
@ -420,7 +420,7 @@ nonzero_address_p (const_rtx x)
|
||||
/* Similar to the above; allow positive offsets. Further, since
|
||||
auto-inc is only allowed in memories, the register must be a
|
||||
pointer. */
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& INTVAL (XEXP (x, 1)) > 0)
|
||||
return true;
|
||||
return nonzero_address_p (XEXP (x, 0));
|
||||
@ -495,10 +495,10 @@ get_integer_term (const_rtx x)
|
||||
x = XEXP (x, 0);
|
||||
|
||||
if (GET_CODE (x) == MINUS
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
return - INTVAL (XEXP (x, 1));
|
||||
if (GET_CODE (x) == PLUS
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
return INTVAL (XEXP (x, 1));
|
||||
return 0;
|
||||
}
|
||||
@ -514,10 +514,10 @@ get_related_value (const_rtx x)
|
||||
return 0;
|
||||
x = XEXP (x, 0);
|
||||
if (GET_CODE (x) == PLUS
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
return XEXP (x, 0);
|
||||
else if (GET_CODE (x) == MINUS
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (x, 1)))
|
||||
return XEXP (x, 0);
|
||||
return 0;
|
||||
}
|
||||
@ -566,7 +566,7 @@ split_const (rtx x, rtx *base_out, rtx *offset_out)
|
||||
if (GET_CODE (x) == CONST)
|
||||
{
|
||||
x = XEXP (x, 0);
|
||||
if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
|
||||
{
|
||||
*base_out = XEXP (x, 0);
|
||||
*offset_out = XEXP (x, 1);
|
||||
@ -2495,7 +2495,7 @@ replace_rtx (rtx x, rtx from, rtx to)
|
||||
{
|
||||
rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
|
||||
|
||||
if (GET_CODE (new_rtx) == CONST_INT)
|
||||
if (CONST_INT_P (new_rtx))
|
||||
{
|
||||
x = simplify_subreg (GET_MODE (x), new_rtx,
|
||||
GET_MODE (SUBREG_REG (x)),
|
||||
@ -2511,7 +2511,7 @@ replace_rtx (rtx x, rtx from, rtx to)
|
||||
{
|
||||
rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
|
||||
|
||||
if (GET_CODE (new_rtx) == CONST_INT)
|
||||
if (CONST_INT_P (new_rtx))
|
||||
{
|
||||
x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
|
||||
new_rtx, GET_MODE (XEXP (x, 0)));
|
||||
@ -2641,9 +2641,7 @@ tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
|
||||
if (JUMP_P (insn)
|
||||
&& (label = JUMP_LABEL (insn)) != NULL_RTX
|
||||
&& (table = next_active_insn (label)) != NULL_RTX
|
||||
&& JUMP_P (table)
|
||||
&& (GET_CODE (PATTERN (table)) == ADDR_VEC
|
||||
|| GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
|
||||
&& JUMP_TABLE_DATA_P (table))
|
||||
{
|
||||
if (labelp)
|
||||
*labelp = label;
|
||||
@ -3995,7 +3993,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
break;
|
||||
|
||||
case ZERO_EXTRACT:
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
|
||||
nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
|
||||
break;
|
||||
@ -4053,7 +4051,7 @@ nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
the shift when shifted the appropriate number of bits. This
|
||||
shows that high-order bits are cleared by the right shift and
|
||||
low-order bits by left shifts. */
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& INTVAL (XEXP (x, 1)) >= 0
|
||||
&& INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
|
||||
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
|
||||
@ -4348,7 +4346,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
break;
|
||||
|
||||
case SIGN_EXTRACT:
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
|
||||
if (CONST_INT_P (XEXP (x, 1)))
|
||||
return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
|
||||
break;
|
||||
|
||||
@ -4372,7 +4370,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
/* If we are rotating left by a number of bits less than the number
|
||||
of sign bit copies, we can just subtract that amount from the
|
||||
number. */
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& INTVAL (XEXP (x, 1)) >= 0
|
||||
&& INTVAL (XEXP (x, 1)) < (int) bitwidth)
|
||||
{
|
||||
@ -4418,7 +4416,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
if (code == AND
|
||||
&& num1 > 1
|
||||
&& bitwidth <= HOST_BITS_PER_WIDE_INT
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (x, 1))
|
||||
&& !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
|
||||
return num1;
|
||||
|
||||
@ -4426,7 +4424,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
if (code == IOR
|
||||
&& num1 > 1
|
||||
&& bitwidth <= HOST_BITS_PER_WIDE_INT
|
||||
&& GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (x, 1))
|
||||
&& (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
|
||||
return num1;
|
||||
|
||||
@ -4536,7 +4534,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
sign bit. */
|
||||
num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
|
||||
known_x, known_mode, known_ret);
|
||||
if (GET_CODE (XEXP (x, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (x, 1))
|
||||
&& INTVAL (XEXP (x, 1)) > 0
|
||||
&& INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
|
||||
num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
|
||||
@ -4545,7 +4543,7 @@ num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
|
||||
|
||||
case ASHIFT:
|
||||
/* Left shifts destroy copies. */
|
||||
if (GET_CODE (XEXP (x, 1)) != CONST_INT
|
||||
if (!CONST_INT_P (XEXP (x, 1))
|
||||
|| INTVAL (XEXP (x, 1)) < 0
|
||||
|| INTVAL (XEXP (x, 1)) >= (int) bitwidth
|
||||
|| INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x)))
|
||||
@ -4857,7 +4855,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
|
||||
overflow. */
|
||||
|
||||
if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
|
||||
&& GET_CODE (op1) == CONST_INT
|
||||
&& CONST_INT_P (op1)
|
||||
&& GET_MODE (op0) != VOIDmode
|
||||
&& GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
|
@@ -81,7 +81,7 @@ print_exp (char *buf, const_rtx x, int verbose)
{
case PLUS:
op[0] = XEXP (x, 0);
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) < 0)
{
st[1] = "-";
@@ -750,7 +750,7 @@ print_rtl_slim (FILE *f, rtx first, rtx last, int count, int flags)
insn = NEXT_INSN (insn))
{
if ((flags & TDF_BLOCKS)
- && (INSN_P (insn) || GET_CODE (insn) == NOTE)
+ && (INSN_P (insn) || NOTE_P (insn))
&& BLOCK_FOR_INSN (insn)
&& !current_bb)
{
@@ -898,7 +898,7 @@ sdbout_symbol (tree decl, int local)
else if (MEM_P (value)
&& ((GET_CODE (XEXP (value, 0)) == PLUS
&& REG_P (XEXP (XEXP (value, 0), 0))
- && GET_CODE (XEXP (XEXP (value, 0), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (XEXP (value, 0), 1)))
/* This is for variables which are at offset zero from
the frame pointer. This happens on the Alpha.
Non-frame pointer registers are excluded above. */
@@ -1285,7 +1285,7 @@ sdbout_parms (tree parms)
If that is not true, we produce meaningless results,
but do not crash. */
if (GET_CODE (addr) == PLUS
- && GET_CODE (XEXP (addr, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (addr, 1)))
current_sym_value = INTVAL (XEXP (addr, 1));
else
current_sym_value = 0;
@@ -1413,7 +1413,7 @@ sdbout_reg_parms (tree parms)
/* Report parms that live in memory but not where they were passed. */
else if (MEM_P (DECL_RTL (parms))
&& GET_CODE (XEXP (DECL_RTL (parms), 0)) == PLUS
- && GET_CODE (XEXP (XEXP (DECL_RTL (parms), 0), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (DECL_RTL (parms), 0), 1))
&& PARM_PASSED_IN_MEMORY (parms)
&& ! rtx_equal_p (DECL_RTL (parms), DECL_INCOMING_RTL (parms)))
{
@@ -2527,7 +2527,7 @@ setup_id_lhs_rhs (idata_t id, insn_t insn, bool force_unique_p)
{
rtx pat = PATTERN (insn);

- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (pat) == SET
&& !force_unique_p)
{
@ -88,7 +88,7 @@ mode_signbit_p (enum machine_mode mode, const_rtx x)
|
||||
return false;
|
||||
|
||||
if (width <= HOST_BITS_PER_WIDE_INT
|
||||
&& GET_CODE (x) == CONST_INT)
|
||||
&& CONST_INT_P (x))
|
||||
val = INTVAL (x);
|
||||
else if (width <= 2 * HOST_BITS_PER_WIDE_INT
|
||||
&& GET_CODE (x) == CONST_DOUBLE
|
||||
@ -169,7 +169,7 @@ avoid_constant_pool_reference (rtx x)
|
||||
/* Split the address into a base and integer offset. */
|
||||
if (GET_CODE (addr) == CONST
|
||||
&& GET_CODE (XEXP (addr, 0)) == PLUS
|
||||
&& GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (XEXP (addr, 0), 1)))
|
||||
{
|
||||
offset = INTVAL (XEXP (XEXP (addr, 0), 1));
|
||||
addr = XEXP (XEXP (addr, 0), 0);
|
||||
@ -413,14 +413,14 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
||||
|
||||
/* (not (xor X C)) for C constant is (xor X D) with D = ~C. */
|
||||
if (GET_CODE (op) == XOR
|
||||
&& GET_CODE (XEXP (op, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op, 1))
|
||||
&& (temp = simplify_unary_operation (NOT, mode,
|
||||
XEXP (op, 1), mode)) != 0)
|
||||
return simplify_gen_binary (XOR, mode, XEXP (op, 0), temp);
|
||||
|
||||
/* (not (plus X C)) for signbit C is (xor X D) with D = ~C. */
|
||||
if (GET_CODE (op) == PLUS
|
||||
&& GET_CODE (XEXP (op, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op, 1))
|
||||
&& mode_signbit_p (mode, XEXP (op, 1))
|
||||
&& (temp = simplify_unary_operation (NOT, mode,
|
||||
XEXP (op, 1), mode)) != 0)
|
||||
@ -445,7 +445,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
||||
|
||||
if (STORE_FLAG_VALUE == -1
|
||||
&& GET_CODE (op) == ASHIFTRT
|
||||
&& GET_CODE (XEXP (op, 1)) == CONST_INT
|
||||
&& GET_CODE (XEXP (op, 1))
|
||||
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
|
||||
return simplify_gen_relational (GE, mode, VOIDmode,
|
||||
XEXP (op, 0), const0_rtx);
|
||||
@ -526,7 +526,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
||||
&& !HONOR_SIGN_DEPENDENT_ROUNDING (mode))
|
||||
{
|
||||
/* (neg (plus A C)) is simplified to (minus -C A). */
|
||||
if (GET_CODE (XEXP (op, 1)) == CONST_INT
|
||||
if (CONST_INT_P (XEXP (op, 1))
|
||||
|| GET_CODE (XEXP (op, 1)) == CONST_DOUBLE)
|
||||
{
|
||||
temp = simplify_unary_operation (NEG, mode, XEXP (op, 1), mode);
|
||||
@ -561,7 +561,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
||||
/* (neg (ashiftrt X C)) can be replaced by (lshiftrt X C) when
|
||||
C is equal to the width of MODE minus 1. */
|
||||
if (GET_CODE (op) == ASHIFTRT
|
||||
&& GET_CODE (XEXP (op, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op, 1))
|
||||
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
|
||||
return simplify_gen_binary (LSHIFTRT, mode,
|
||||
XEXP (op, 0), XEXP (op, 1));
|
||||
@ -569,7 +569,7 @@ simplify_unary_operation_1 (enum rtx_code code, enum machine_mode mode, rtx op)
|
||||
/* (neg (lshiftrt X C)) can be replaced by (ashiftrt X C) when
|
||||
C is equal to the width of MODE minus 1. */
|
||||
if (GET_CODE (op) == LSHIFTRT
|
||||
&& GET_CODE (XEXP (op, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (op, 1))
|
||||
&& INTVAL (XEXP (op, 1)) == GET_MODE_BITSIZE (mode) - 1)
|
||||
return simplify_gen_binary (ASHIFTRT, mode,
|
||||
XEXP (op, 0), XEXP (op, 1));
|
||||
@ -923,7 +923,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
|
||||
gcc_assert (GET_MODE_INNER (mode) == GET_MODE_INNER
|
||||
(GET_MODE (op)));
|
||||
}
|
||||
if (GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE
|
||||
if (CONST_INT_P (op) || GET_CODE (op) == CONST_DOUBLE
|
||||
|| GET_CODE (op) == CONST_VECTOR)
|
||||
{
|
||||
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
|
||||
@ -977,12 +977,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
|
||||
such as FIX. At some point, this should be simplified. */
|
||||
|
||||
if (code == FLOAT && GET_MODE (op) == VOIDmode
|
||||
&& (GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT))
|
||||
&& (GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op)))
|
||||
{
|
||||
HOST_WIDE_INT hv, lv;
|
||||
REAL_VALUE_TYPE d;
|
||||
|
||||
if (GET_CODE (op) == CONST_INT)
|
||||
if (CONST_INT_P (op))
|
||||
lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
|
||||
else
|
||||
lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
|
||||
@ -993,12 +993,12 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
|
||||
}
|
||||
else if (code == UNSIGNED_FLOAT && GET_MODE (op) == VOIDmode
|
||||
&& (GET_CODE (op) == CONST_DOUBLE
|
||||
|| GET_CODE (op) == CONST_INT))
|
||||
|| CONST_INT_P (op)))
|
||||
{
|
||||
HOST_WIDE_INT hv, lv;
|
||||
REAL_VALUE_TYPE d;
|
||||
|
||||
if (GET_CODE (op) == CONST_INT)
|
||||
if (CONST_INT_P (op))
|
||||
lv = INTVAL (op), hv = HWI_SIGN_EXTEND (lv);
|
||||
else
|
||||
lv = CONST_DOUBLE_LOW (op), hv = CONST_DOUBLE_HIGH (op);
|
||||
@ -1020,7 +1020,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
|
||||
return CONST_DOUBLE_FROM_REAL_VALUE (d, mode);
|
||||
}
|
||||
|
||||
if (GET_CODE (op) == CONST_INT
|
||||
if (CONST_INT_P (op)
|
||||
&& width <= HOST_BITS_PER_WIDE_INT && width > 0)
|
||||
{
|
||||
HOST_WIDE_INT arg0 = INTVAL (op);
|
||||
@ -1164,7 +1164,7 @@ simplify_const_unary_operation (enum rtx_code code, enum machine_mode mode,
|
||||
else if (GET_MODE (op) == VOIDmode
|
||||
&& width <= HOST_BITS_PER_WIDE_INT * 2
|
||||
&& (GET_CODE (op) == CONST_DOUBLE
|
||||
|| GET_CODE (op) == CONST_INT))
|
||||
|| CONST_INT_P (op)))
|
||||
{
|
||||
unsigned HOST_WIDE_INT l1, lv;
|
||||
HOST_WIDE_INT h1, hv;
|
||||
@ -1597,12 +1597,12 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
||||
if ((GET_CODE (op0) == CONST
|
||||
|| GET_CODE (op0) == SYMBOL_REF
|
||||
|| GET_CODE (op0) == LABEL_REF)
|
||||
&& GET_CODE (op1) == CONST_INT)
|
||||
&& CONST_INT_P (op1))
|
||||
return plus_constant (op0, INTVAL (op1));
|
||||
else if ((GET_CODE (op1) == CONST
|
||||
|| GET_CODE (op1) == SYMBOL_REF
|
||||
|| GET_CODE (op1) == LABEL_REF)
|
||||
&& GET_CODE (op0) == CONST_INT)
|
||||
&& CONST_INT_P (op0))
|
||||
return plus_constant (op1, INTVAL (op0));
|
||||
|
||||
/* See if this is something like X * C - X or vice versa or
|
||||
@ -1624,14 +1624,14 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
||||
lhs = XEXP (lhs, 0);
|
||||
}
|
||||
else if (GET_CODE (lhs) == MULT
|
||||
&& GET_CODE (XEXP (lhs, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (lhs, 1)))
|
||||
{
|
||||
coeff0l = INTVAL (XEXP (lhs, 1));
|
||||
coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
|
||||
lhs = XEXP (lhs, 0);
|
||||
}
|
||||
else if (GET_CODE (lhs) == ASHIFT
|
||||
&& GET_CODE (XEXP (lhs, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (lhs, 1))
|
||||
&& INTVAL (XEXP (lhs, 1)) >= 0
|
||||
&& INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
@ -1647,14 +1647,14 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
||||
rhs = XEXP (rhs, 0);
|
||||
}
|
||||
else if (GET_CODE (rhs) == MULT
|
||||
&& GET_CODE (XEXP (rhs, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (rhs, 1)))
|
||||
{
|
||||
coeff1l = INTVAL (XEXP (rhs, 1));
|
||||
coeff1h = INTVAL (XEXP (rhs, 1)) < 0 ? -1 : 0;
|
||||
rhs = XEXP (rhs, 0);
|
||||
}
|
||||
else if (GET_CODE (rhs) == ASHIFT
|
||||
&& GET_CODE (XEXP (rhs, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (rhs, 1))
|
||||
&& INTVAL (XEXP (rhs, 1)) >= 0
|
||||
&& INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
@ -1681,10 +1681,10 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
||||
}
|
||||
|
||||
/* (plus (xor X C1) C2) is (xor X (C1^C2)) if C2 is signbit. */
|
||||
if ((GET_CODE (op1) == CONST_INT
|
||||
if ((CONST_INT_P (op1)
|
||||
|| GET_CODE (op1) == CONST_DOUBLE)
|
||||
&& GET_CODE (op0) == XOR
|
||||
&& (GET_CODE (XEXP (op0, 1)) == CONST_INT
|
||||
&& (CONST_INT_P (XEXP (op0, 1))
|
||||
|| GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
|
||||
&& mode_signbit_p (mode, op1))
|
||||
return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
|
||||
@ -1807,14 +1807,14 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
||||
lhs = XEXP (lhs, 0);
|
||||
}
|
||||
else if (GET_CODE (lhs) == MULT
|
||||
&& GET_CODE (XEXP (lhs, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (lhs, 1)))
|
||||
{
|
||||
coeff0l = INTVAL (XEXP (lhs, 1));
|
||||
coeff0h = INTVAL (XEXP (lhs, 1)) < 0 ? -1 : 0;
|
||||
lhs = XEXP (lhs, 0);
|
||||
}
|
||||
else if (GET_CODE (lhs) == ASHIFT
|
||||
&& GET_CODE (XEXP (lhs, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (lhs, 1))
|
||||
&& INTVAL (XEXP (lhs, 1)) >= 0
|
||||
&& INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
@ -1830,14 +1830,14 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
|
||||
rhs = XEXP (rhs, 0);
|
||||
}
|
||||
else if (GET_CODE (rhs) == MULT
|
||||
&& GET_CODE (XEXP (rhs, 1)) == CONST_INT)
|
||||
&& CONST_INT_P (XEXP (rhs, 1)))
|
||||
{
|
||||
negcoeff1l = -INTVAL (XEXP (rhs, 1));
|
||||
negcoeff1h = INTVAL (XEXP (rhs, 1)) <= 0 ? 0 : -1;
|
||||
rhs = XEXP (rhs, 0);
|
||||
}
|
||||
else if (GET_CODE (rhs) == ASHIFT
|
||||
&& GET_CODE (XEXP (rhs, 1)) == CONST_INT
|
||||
&& CONST_INT_P (XEXP (rhs, 1))
|
||||
&& INTVAL (XEXP (rhs, 1)) >= 0
|
||||
&& INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
|
||||
{
|
||||
@ -1869,7 +1869,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,

/* (-x - c) may be simplified as (-c - x). */
if (GET_CODE (op0) == NEG
&& (GET_CODE (op1) == CONST_INT
&& (CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE))
{
tem = simplify_unary_operation (NEG, mode, op1, mode);
@ -1878,7 +1878,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
}

/* Don't let a relocatable value get a negative coeff. */
if (GET_CODE (op1) == CONST_INT && GET_MODE (op0) != VOIDmode)
if (CONST_INT_P (op1) && GET_MODE (op0) != VOIDmode)
return simplify_gen_binary (PLUS, mode,
op0,
neg_const_int (mode, op1));
@ -1975,7 +1975,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,

/* Convert multiply by constant power of two into shift unless
we are still generating RTL. This test is a kludge. */
if (GET_CODE (trueop1) == CONST_INT
if (CONST_INT_P (trueop1)
&& (val = exact_log2 (INTVAL (trueop1))) >= 0
/* If the mode is larger than the host word size, and the
uppermost bit is set, then this isn't a power of two due
@ -2040,7 +2040,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
case IOR:
if (trueop1 == const0_rtx)
return op0;
if (GET_CODE (trueop1) == CONST_INT
if (CONST_INT_P (trueop1)
&& ((INTVAL (trueop1) & GET_MODE_MASK (mode))
== GET_MODE_MASK (mode)))
return op1;
@ -2054,15 +2054,15 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
return constm1_rtx;

/* (ior A C) is C if all bits of A that might be nonzero are on in C. */
if (GET_CODE (op1) == CONST_INT
if (CONST_INT_P (op1)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
return op1;

/* Canonicalize (X & C1) | C2. */
if (GET_CODE (op0) == AND
&& GET_CODE (trueop1) == CONST_INT
&& GET_CODE (XEXP (op0, 1)) == CONST_INT)
&& CONST_INT_P (trueop1)
&& CONST_INT_P (XEXP (op0, 1)))
{
HOST_WIDE_INT mask = GET_MODE_MASK (mode);
HOST_WIDE_INT c1 = INTVAL (XEXP (op0, 1));
@ -2111,8 +2111,8 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,

if (GET_CODE (opleft) == ASHIFT && GET_CODE (opright) == LSHIFTRT
&& rtx_equal_p (XEXP (opleft, 0), XEXP (opright, 0))
&& GET_CODE (XEXP (opleft, 1)) == CONST_INT
&& GET_CODE (XEXP (opright, 1)) == CONST_INT
&& CONST_INT_P (XEXP (opleft, 1))
&& CONST_INT_P (XEXP (opright, 1))
&& (INTVAL (XEXP (opleft, 1)) + INTVAL (XEXP (opright, 1))
== GET_MODE_BITSIZE (mode)))
return gen_rtx_ROTATE (mode, XEXP (opright, 0), XEXP (opleft, 1));
@ -2130,8 +2130,8 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
< GET_MODE_SIZE (GET_MODE (SUBREG_REG (opleft))))
&& rtx_equal_p (XEXP (SUBREG_REG (opleft), 0),
SUBREG_REG (XEXP (opright, 0)))
&& GET_CODE (XEXP (SUBREG_REG (opleft), 1)) == CONST_INT
&& GET_CODE (XEXP (opright, 1)) == CONST_INT
&& CONST_INT_P (XEXP (SUBREG_REG (opleft), 1))
&& CONST_INT_P (XEXP (opright, 1))
&& (INTVAL (XEXP (SUBREG_REG (opleft), 1)) + INTVAL (XEXP (opright, 1))
== GET_MODE_BITSIZE (mode)))
return gen_rtx_ROTATE (mode, XEXP (opright, 0),
@ -2139,12 +2139,12 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,

/* If we have (ior (and (X C1) C2)), simplify this by making
C1 as small as possible if C1 actually changes. */
if (GET_CODE (op1) == CONST_INT
if (CONST_INT_P (op1)
&& (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
|| INTVAL (op1) > 0)
&& GET_CODE (op0) == AND
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
&& GET_CODE (op1) == CONST_INT
&& CONST_INT_P (XEXP (op0, 1))
&& CONST_INT_P (op1)
&& (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
return simplify_gen_binary (IOR, mode,
simplify_gen_binary
@ -2158,10 +2158,10 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
the PLUS does not affect any of the bits in OP1: then we can do
the IOR as a PLUS and we can associate. This is valid if OP1
can be safely shifted left C bits. */
if (GET_CODE (trueop1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
if (CONST_INT_P (trueop1) && GET_CODE (op0) == ASHIFTRT
&& GET_CODE (XEXP (op0, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
&& CONST_INT_P (XEXP (XEXP (op0, 0), 1))
&& CONST_INT_P (XEXP (op0, 1))
&& INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
{
int count = INTVAL (XEXP (op0, 1));
@ -2182,7 +2182,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
case XOR:
if (trueop1 == const0_rtx)
return op0;
if (GET_CODE (trueop1) == CONST_INT
if (CONST_INT_P (trueop1)
&& ((INTVAL (trueop1) & GET_MODE_MASK (mode))
== GET_MODE_MASK (mode)))
return simplify_gen_unary (NOT, mode, op0, mode);
@ -2192,15 +2192,15 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
return CONST0_RTX (mode);

/* Canonicalize XOR of the most significant bit to PLUS. */
if ((GET_CODE (op1) == CONST_INT
if ((CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE)
&& mode_signbit_p (mode, op1))
return simplify_gen_binary (PLUS, mode, op0, op1);
/* (xor (plus X C1) C2) is (xor X (C1^C2)) if C1 is signbit. */
if ((GET_CODE (op1) == CONST_INT
if ((CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE)
&& GET_CODE (op0) == PLUS
&& (GET_CODE (XEXP (op0, 1)) == CONST_INT
&& (CONST_INT_P (XEXP (op0, 1))
|| GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE)
&& mode_signbit_p (mode, XEXP (op0, 1)))
return simplify_gen_binary (XOR, mode, XEXP (op0, 0),
@ -2270,7 +2270,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
if (STORE_FLAG_VALUE == 1
&& trueop1 == const1_rtx
&& GET_CODE (op0) == LSHIFTRT
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
&& CONST_INT_P (XEXP (op0, 1))
&& INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);

@ -2296,7 +2296,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
{
HOST_WIDE_INT nzop0 = nonzero_bits (trueop0, mode);
HOST_WIDE_INT nzop1;
if (GET_CODE (trueop1) == CONST_INT)
if (CONST_INT_P (trueop1))
{
HOST_WIDE_INT val1 = INTVAL (trueop1);
/* If we are turning off bits already known off in OP0, we need
@ -2324,7 +2324,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
there are no nonzero bits of C outside of X's mode. */
if ((GET_CODE (op0) == SIGN_EXTEND
|| GET_CODE (op0) == ZERO_EXTEND)
&& GET_CODE (trueop1) == CONST_INT
&& CONST_INT_P (trueop1)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))
& INTVAL (trueop1)) == 0)
@ -2338,8 +2338,8 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,

/* Canonicalize (A | C1) & C2 as (A & C2) | (C1 & C2). */
if (GET_CODE (op0) == IOR
&& GET_CODE (trueop1) == CONST_INT
&& GET_CODE (XEXP (op0, 1)) == CONST_INT)
&& CONST_INT_P (trueop1)
&& CONST_INT_P (XEXP (op0, 1)))
{
HOST_WIDE_INT tmp = INTVAL (trueop1) & INTVAL (XEXP (op0, 1));
return simplify_gen_binary (IOR, mode,
@ -2394,7 +2394,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
and for - instead of + and/or ^ instead of |.
Also, if (N & M) == 0, then
(A +- N) & M -> A & M. */
if (GET_CODE (trueop1) == CONST_INT
if (CONST_INT_P (trueop1)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& ~INTVAL (trueop1)
&& (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0
@ -2406,7 +2406,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
pmop[0] = XEXP (op0, 0);
pmop[1] = XEXP (op0, 1);

if (GET_CODE (pmop[1]) == CONST_INT
if (CONST_INT_P (pmop[1])
&& (INTVAL (pmop[1]) & INTVAL (trueop1)) == 0)
return simplify_gen_binary (AND, mode, pmop[0], op1);

@ -2416,14 +2416,14 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
switch (GET_CODE (tem))
{
case AND:
if (GET_CODE (XEXP (tem, 1)) == CONST_INT
if (CONST_INT_P (XEXP (tem, 1))
&& (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1))
== INTVAL (trueop1))
pmop[which] = XEXP (tem, 0);
break;
case IOR:
case XOR:
if (GET_CODE (XEXP (tem, 1)) == CONST_INT
if (CONST_INT_P (XEXP (tem, 1))
&& (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0)
pmop[which] = XEXP (tem, 0);
break;
@ -2469,7 +2469,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
if (trueop1 == CONST1_RTX (mode))
return rtl_hooks.gen_lowpart_no_emit (mode, op0);
/* Convert divide by power of two into shift. */
if (GET_CODE (trueop1) == CONST_INT
if (CONST_INT_P (trueop1)
&& (val = exact_log2 (INTVAL (trueop1))) > 0)
return simplify_gen_binary (LSHIFTRT, mode, op0, GEN_INT (val));
break;
@ -2551,7 +2551,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
return CONST0_RTX (mode);
}
/* Implement modulus by power of two as AND. */
if (GET_CODE (trueop1) == CONST_INT
if (CONST_INT_P (trueop1)
&& exact_log2 (INTVAL (trueop1)) > 0)
return simplify_gen_binary (AND, mode, op0,
GEN_INT (INTVAL (op1) - 1));
@ -2582,12 +2582,12 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
if (trueop0 == CONST0_RTX (mode) && ! side_effects_p (op1))
return op0;
/* Rotating ~0 always results in ~0. */
if (GET_CODE (trueop0) == CONST_INT && width <= HOST_BITS_PER_WIDE_INT
if (CONST_INT_P (trueop0) && width <= HOST_BITS_PER_WIDE_INT
&& (unsigned HOST_WIDE_INT) INTVAL (trueop0) == GET_MODE_MASK (mode)
&& ! side_effects_p (op1))
return op0;
canonicalize_shift:
if (SHIFT_COUNT_TRUNCATED && GET_CODE (op1) == CONST_INT)
if (SHIFT_COUNT_TRUNCATED && CONST_INT_P (op1))
{
val = INTVAL (op1) & (GET_MODE_BITSIZE (mode) - 1);
if (val != INTVAL (op1))
@ -2611,7 +2611,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
return op0;
/* Optimize (lshiftrt (clz X) C) as (eq X 0). */
if (GET_CODE (op0) == CLZ
&& GET_CODE (trueop1) == CONST_INT
&& CONST_INT_P (trueop1)
&& STORE_FLAG_VALUE == 1
&& INTVAL (trueop1) < (HOST_WIDE_INT)width)
{
@ -2628,7 +2628,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,

case SMIN:
if (width <= HOST_BITS_PER_WIDE_INT
&& GET_CODE (trueop1) == CONST_INT
&& CONST_INT_P (trueop1)
&& INTVAL (trueop1) == (HOST_WIDE_INT) 1 << (width -1)
&& ! side_effects_p (op0))
return op1;
@ -2641,7 +2641,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,

case SMAX:
if (width <= HOST_BITS_PER_WIDE_INT
&& GET_CODE (trueop1) == CONST_INT
&& CONST_INT_P (trueop1)
&& ((unsigned HOST_WIDE_INT) INTVAL (trueop1)
== (unsigned HOST_WIDE_INT) GET_MODE_MASK (mode) >> 1)
&& ! side_effects_p (op0))
@ -2691,7 +2691,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0)));
gcc_assert (GET_CODE (trueop1) == PARALLEL);
gcc_assert (XVECLEN (trueop1, 0) == 1);
gcc_assert (GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT);
gcc_assert (CONST_INT_P (XVECEXP (trueop1, 0, 0)));

if (GET_CODE (trueop0) == CONST_VECTOR)
return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP
@ -2795,7 +2795,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
{
rtx x = XVECEXP (trueop1, 0, i);

gcc_assert (GET_CODE (x) == CONST_INT);
gcc_assert (CONST_INT_P (x));
RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0,
INTVAL (x));
}
@ -2805,7 +2805,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
}

if (XVECLEN (trueop1, 0) == 1
&& GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT
&& CONST_INT_P (XVECEXP (trueop1, 0, 0))
&& GET_CODE (trueop0) == VEC_CONCAT)
{
rtx vec = trueop0;
@ -2857,10 +2857,10 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
gcc_assert (GET_MODE_INNER (mode) == op1_mode);

if ((GET_CODE (trueop0) == CONST_VECTOR
|| GET_CODE (trueop0) == CONST_INT
|| CONST_INT_P (trueop0)
|| GET_CODE (trueop0) == CONST_DOUBLE)
&& (GET_CODE (trueop1) == CONST_VECTOR
|| GET_CODE (trueop1) == CONST_INT
|| CONST_INT_P (trueop1)
|| GET_CODE (trueop1) == CONST_DOUBLE))
{
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
@ -3102,8 +3102,8 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
/* We can fold some multi-word operations. */
if (GET_MODE_CLASS (mode) == MODE_INT
&& width == HOST_BITS_PER_WIDE_INT * 2
&& (GET_CODE (op0) == CONST_DOUBLE || GET_CODE (op0) == CONST_INT)
&& (GET_CODE (op1) == CONST_DOUBLE || GET_CODE (op1) == CONST_INT))
&& (GET_CODE (op0) == CONST_DOUBLE || CONST_INT_P (op0))
&& (GET_CODE (op1) == CONST_DOUBLE || CONST_INT_P (op1)))
{
unsigned HOST_WIDE_INT l1, l2, lv, lt;
HOST_WIDE_INT h1, h2, hv, ht;
@ -3238,7 +3238,7 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
return immed_double_const (lv, hv, mode);
}

if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
if (CONST_INT_P (op0) && CONST_INT_P (op1)
&& width <= HOST_BITS_PER_WIDE_INT && width != 0)
{
/* Get the integer argument values in two forms:
@ -3617,8 +3617,8 @@ simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,
else if (swap_commutative_operands_p (lhs, rhs))
tem = lhs, lhs = rhs, rhs = tem;

if ((GET_CODE (lhs) == CONST || GET_CODE (lhs) == CONST_INT)
&& (GET_CODE (rhs) == CONST || GET_CODE (rhs) == CONST_INT))
if ((GET_CODE (lhs) == CONST || CONST_INT_P (lhs))
&& (GET_CODE (rhs) == CONST || CONST_INT_P (rhs)))
{
rtx tem_lhs, tem_rhs;

@ -3645,7 +3645,7 @@ simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,
lneg &= rneg;
if (GET_CODE (tem) == NEG)
tem = XEXP (tem, 0), lneg = !lneg;
if (GET_CODE (tem) == CONST_INT && lneg)
if (CONST_INT_P (tem) && lneg)
tem = neg_const_int (mode, tem), lneg = 0;

ops[i].op = tem;
@ -3674,7 +3674,7 @@ simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,

/* Create (minus -C X) instead of (neg (const (plus X C))). */
if (n_ops == 2
&& GET_CODE (ops[1].op) == CONST_INT
&& CONST_INT_P (ops[1].op)
&& CONSTANT_P (ops[0].op)
&& ops[0].neg)
return gen_rtx_fmt_ee (MINUS, mode, ops[1].op, ops[0].op);
@ -3686,7 +3686,7 @@ simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0,
in the array and that any other constant will be next-to-last. */

if (n_ops > 1
&& GET_CODE (ops[n_ops - 1].op) == CONST_INT
&& CONST_INT_P (ops[n_ops - 1].op)
&& CONSTANT_P (ops[n_ops - 2].op))
{
rtx value = ops[n_ops - 1].op;
@ -3853,7 +3853,7 @@ simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode,
(GEU/LTU a -C). Likewise for (LTU/GEU (PLUS a C) a). */
if ((code == LTU || code == GEU)
&& GET_CODE (op0) == PLUS
&& GET_CODE (XEXP (op0, 1)) == CONST_INT
&& CONST_INT_P (XEXP (op0, 1))
&& (rtx_equal_p (op1, XEXP (op0, 0))
|| rtx_equal_p (op1, XEXP (op0, 1))))
{
@ -3970,9 +3970,9 @@ simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode,
/* (eq/ne (xor x C1) C2) simplifies to (eq/ne x (C1^C2)). */
if ((code == EQ || code == NE)
&& op0code == XOR
&& (GET_CODE (op1) == CONST_INT
&& (CONST_INT_P (op1)
|| GET_CODE (op1) == CONST_DOUBLE)
&& (GET_CODE (XEXP (op0, 1)) == CONST_INT
&& (CONST_INT_P (XEXP (op0, 1))
|| GET_CODE (XEXP (op0, 1)) == CONST_DOUBLE))
return simplify_gen_relational (code, mode, cmp_mode, XEXP (op0, 0),
simplify_gen_binary (XOR, cmp_mode,
@ -4122,8 +4122,8 @@ simplify_const_relational_operation (enum rtx_code code,

if (INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx
&& (code == EQ || code == NE)
&& ! ((REG_P (op0) || GET_CODE (trueop0) == CONST_INT)
&& (REG_P (op1) || GET_CODE (trueop1) == CONST_INT))
&& ! ((REG_P (op0) || CONST_INT_P (trueop0))
&& (REG_P (op1) || CONST_INT_P (trueop1)))
&& 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1))
/* We cannot do this if tem is a nonzero address. */
&& ! nonzero_address_p (tem))
@ -4191,9 +4191,9 @@ simplify_const_relational_operation (enum rtx_code code,
/* Otherwise, see if the operands are both integers. */
if ((GET_MODE_CLASS (mode) == MODE_INT || mode == VOIDmode)
&& (GET_CODE (trueop0) == CONST_DOUBLE
|| GET_CODE (trueop0) == CONST_INT)
|| CONST_INT_P (trueop0))
&& (GET_CODE (trueop1) == CONST_DOUBLE
|| GET_CODE (trueop1) == CONST_INT))
|| CONST_INT_P (trueop1)))
{
int width = GET_MODE_BITSIZE (mode);
HOST_WIDE_INT l0s, h0s, l1s, h1s;
@ -4252,7 +4252,7 @@ simplify_const_relational_operation (enum rtx_code code,
/* Optimize comparisons with upper and lower bounds. */
if (SCALAR_INT_MODE_P (mode)
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& GET_CODE (trueop1) == CONST_INT)
&& CONST_INT_P (trueop1))
{
int sign;
unsigned HOST_WIDE_INT nonzero = nonzero_bits (trueop0, mode);
@ -4382,7 +4382,7 @@ simplify_const_relational_operation (enum rtx_code code,
if (GET_CODE (op0) == IOR)
{
rtx inner_const = avoid_constant_pool_reference (XEXP (op0, 1));
if (GET_CODE (inner_const) == CONST_INT && inner_const != const0_rtx)
if (CONST_INT_P (inner_const) && inner_const != const0_rtx)
{
int sign_bitnum = GET_MODE_BITSIZE (mode) - 1;
int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
@ -4485,9 +4485,9 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
{
case SIGN_EXTRACT:
case ZERO_EXTRACT:
if (GET_CODE (op0) == CONST_INT
&& GET_CODE (op1) == CONST_INT
&& GET_CODE (op2) == CONST_INT
if (CONST_INT_P (op0)
&& CONST_INT_P (op1)
&& CONST_INT_P (op2)
&& ((unsigned) INTVAL (op1) + (unsigned) INTVAL (op2) <= width)
&& width <= (unsigned) HOST_BITS_PER_WIDE_INT)
{
@ -4524,7 +4524,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
break;

case IF_THEN_ELSE:
if (GET_CODE (op0) == CONST_INT)
if (CONST_INT_P (op0))
return op0 != const0_rtx ? op1 : op2;

/* Convert c ? a : a into "a". */
@ -4561,7 +4561,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
rtx temp;

/* Look for happy constants in op1 and op2. */
if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
if (CONST_INT_P (op1) && CONST_INT_P (op2))
{
HOST_WIDE_INT t = INTVAL (op1);
HOST_WIDE_INT f = INTVAL (op2);
@ -4592,7 +4592,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
/* See if any simplifications were possible. */
if (temp)
{
if (GET_CODE (temp) == CONST_INT)
if (CONST_INT_P (temp))
return temp == const0_rtx ? op2 : op1;
else if (temp)
return gen_rtx_IF_THEN_ELSE (mode, temp, op1, op2);
@ -4605,7 +4605,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode,
gcc_assert (GET_MODE (op1) == mode);
gcc_assert (VECTOR_MODE_P (mode));
op2 = avoid_constant_pool_reference (op2);
if (GET_CODE (op2) == CONST_INT)
if (CONST_INT_P (op2))
{
int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size);
@ -4672,7 +4672,7 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op,
enum machine_mode outer_submode;

/* Some ports misuse CCmode. */
if (GET_MODE_CLASS (outermode) == MODE_CC && GET_CODE (op) == CONST_INT)
if (GET_MODE_CLASS (outermode) == MODE_CC && CONST_INT_P (op))
return op;

/* We have no way to represent a complex constant at the rtl level. */
@ -4971,7 +4971,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
if (outermode == innermode && !byte)
return op;

if (GET_CODE (op) == CONST_INT
if (CONST_INT_P (op)
|| GET_CODE (op) == CONST_DOUBLE
|| GET_CODE (op) == CONST_FIXED
|| GET_CODE (op) == CONST_VECTOR)
@ -5204,7 +5204,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
than the sign extension's sign_bit_copies and introduces zeros
into the high bits of the result. */
&& (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode)
&& GET_CODE (XEXP (op, 1)) == CONST_INT
&& CONST_INT_P (XEXP (op, 1))
&& GET_CODE (XEXP (op, 0)) == SIGN_EXTEND
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
@ -5219,7 +5219,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
|| GET_CODE (op) == ASHIFTRT)
&& SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
&& GET_CODE (XEXP (op, 1)) == CONST_INT
&& CONST_INT_P (XEXP (op, 1))
&& GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode)
@ -5233,7 +5233,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
if (GET_CODE (op) == ASHIFT
&& SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode)
&& GET_CODE (XEXP (op, 1)) == CONST_INT
&& CONST_INT_P (XEXP (op, 1))
&& (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND
|| GET_CODE (XEXP (op, 0)) == SIGN_EXTEND)
&& GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode
@ -5248,7 +5248,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
&& SCALAR_INT_MODE_P (outermode)
&& GET_MODE_BITSIZE (outermode) >= BITS_PER_WORD
&& GET_MODE_BITSIZE (innermode) >= (2 * GET_MODE_BITSIZE (outermode))
&& GET_CODE (XEXP (op, 1)) == CONST_INT
&& CONST_INT_P (XEXP (op, 1))
&& (INTVAL (XEXP (op, 1)) & (GET_MODE_BITSIZE (outermode) - 1)) == 0
&& INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (innermode)
&& byte == subreg_lowpart_offset (outermode, innermode))
gcc/targhooks.c
@ -387,9 +387,7 @@ default_invalid_within_doloop (const_rtx insn)
if (CALL_P (insn))
return "Function call in loop.";

if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_VEC))
if (JUMP_TABLE_DATA_P (insn))
return "Computed branch in the loop.";

return NULL;
gcc/var-tracking.c
@ -406,7 +406,7 @@ stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
code = GET_CODE (src);
if (! (code == PLUS || code == MINUS)
|| XEXP (src, 0) != stack_pointer_rtx
|| GET_CODE (XEXP (src, 1)) != CONST_INT)
|| !CONST_INT_P (XEXP (src, 1)))
return;

if (code == MINUS)
@ -429,7 +429,7 @@ stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
rtx val = XEXP (XEXP (src, 1), 1);
/* We handle only adjustments by constant amount. */
gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS &&
GET_CODE (val) == CONST_INT);
CONST_INT_P (val));

if (code == PRE_MODIFY)
*pre -= INTVAL (val);
@ -2220,9 +2220,9 @@ compute_bb_dataflow (basic_block bb)
if (! flag_var_tracking_uninit)
status = VAR_INIT_STATUS_INITIALIZED;

if (GET_CODE (loc) == REG)
if (REG_P (loc))
var_reg_set (out, loc, status, NULL);
else if (GET_CODE (loc) == MEM)
else if (MEM_P (loc))
var_mem_set (out, loc, status, NULL);
}
break;
@ -2953,17 +2953,16 @@ emit_note_insn_var_location (void **varp, void *data)
}
else if (MEM_P (loc[n_var_parts])
&& GET_CODE (XEXP (loc2, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (loc2, 0), 0)) == REG
&& GET_CODE (XEXP (XEXP (loc2, 0), 1)) == CONST_INT)
&& REG_P (XEXP (XEXP (loc2, 0), 0))
&& CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
{
if ((GET_CODE (XEXP (loc[n_var_parts], 0)) == REG
if ((REG_P (XEXP (loc[n_var_parts], 0))
&& rtx_equal_p (XEXP (loc[n_var_parts], 0),
XEXP (XEXP (loc2, 0), 0))
&& INTVAL (XEXP (XEXP (loc2, 0), 1))
== GET_MODE_SIZE (mode))
|| (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
&& GET_CODE (XEXP (XEXP (loc[n_var_parts], 0), 1))
== CONST_INT
&& CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
&& rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
XEXP (XEXP (loc2, 0), 0))
&& INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
@ -3152,7 +3151,7 @@ emit_notes_in_bb (basic_block bb)
enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
if (! flag_var_tracking_uninit)
status = VAR_INIT_STATUS_INITIALIZED;
if (GET_CODE (loc) == REG)
if (REG_P (loc))
var_reg_set (&set, loc, status, NULL);
else
var_mem_set (&set, loc, status, NULL);
gcc/vmsdbgout.c
@ -490,7 +490,7 @@ addr_const_to_string (char *str, rtx x)

case PLUS:
/* Some assemblers need integer constants to appear last (eg masm). */
if (GET_CODE (XEXP (x, 0)) == CONST_INT)
if (CONST_INT_P (XEXP (x, 0)))
{
addr_const_to_string (buf1, XEXP (x, 1));
strcat (str, buf1);
@ -520,7 +520,7 @@ addr_const_to_string (char *str, rtx x)
addr_const_to_string (buf1, XEXP (x, 0));
strcat (str, buf1);
strcat (str, "-");
if (GET_CODE (XEXP (x, 1)) == CONST_INT
if (CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) < 0)
{
strcat (str, "(");