i386.md: Use REG_P, MEM_P, CONST_INT_P, LABEL_P, JUMP_P and CALL_P predicates where applicable.

* config/i386/i386.md: Use REG_P, MEM_P, CONST_INT_P, LABEL_P,
	JUMP_P and CALL_P predicates where applicable.
	* config/i386/i386.c: Ditto.
	* config/i386/i386.md: Ditto.
	* config/i386/mmx.md: Ditto.
	* config/i386/predicates.md: Ditto.

From-SVN: r121079
Author: Uros Bizjak
Date:   2007-01-23 08:14:26 +01:00
Commit: 7656aee4d0 (parent: c56b658bde)
5 changed files with 306 additions and 295 deletions
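
For reference, the predicates this patch switches to are thin wrappers around
GET_CODE comparisons; a minimal sketch of their definitions, paraphrased from
gcc/rtl.h (not part of this diff):

    #define REG_P(X)       (GET_CODE (X) == REG)
    #define MEM_P(X)       (GET_CODE (X) == MEM)
    #define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)
    #define LABEL_P(X)     (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)      (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)      (GET_CODE (X) == CALL_INSN)

Each hunk below is therefore a semantics-preserving rewrite into the shorter,
more idiomatic predicate form.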

gcc/ChangeLog

@@ -1,3 +1,12 @@
2007-01-23 Uros Bizjak <ubizjak@gmail.com>
* config/i386/i386.md: Use REG_P, MEM_P, CONST_INT_P, LABEL_P,
JUMP_P and CALL_P predicates where applicable.
* config/i386/i386.c: Ditto.
* config/i386/i386.md: Ditto.
* config/i386/mmx.md: Ditto.
* config/i386/predicates.md: Ditto.
2007-01-22 Andreas Schwab <schwab@suse.de>
* config/m68k/m68k.h: Fix comment.
@@ -42,8 +51,8 @@
2007-01-21 Jan Hubicka <jh@suse.cz>
* ipa-inline.c (inlining_mode): Comment, move up.
(cgraph_decide_inlining_incrementally): Do not perform inlining itself;
fix handling of flattening of self recursive functions.
(cgraph_decide_inlining_incrementally): Do not perform inlining
itself; fix handling of flattening of self recursive functions.
(cgraph_find_cycles): Remove.
(cgraph_flatten_node): Remove.
(cgraph_decide_inlining): Use incremental inliner to handle flattening.

gcc/config/i386/i386.c

@@ -4708,7 +4708,7 @@ ix86_check_movabs (rtx insn, int opnum)
mem = XEXP (set, opnum);
while (GET_CODE (mem) == SUBREG)
mem = SUBREG_REG (mem);
gcc_assert (GET_CODE (mem) == MEM);
gcc_assert (MEM_P (mem));
return (volatile_ok || !MEM_VOLATILE_P (mem));
}
@@ -5959,7 +5959,7 @@ ix86_decompose_address (rtx addr, struct ix86_address *out)
int retval = 1;
enum ix86_address_seg seg = SEG_DEFAULT;
if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
if (REG_P (addr) || GET_CODE (addr) == SUBREG)
base = addr;
else if (GET_CODE (addr) == PLUS)
{
@@ -6036,7 +6036,7 @@ ix86_decompose_address (rtx addr, struct ix86_address *out)
/* We're called for lea too, which implements ashift on occasion. */
index = XEXP (addr, 0);
tmp = XEXP (addr, 1);
if (GET_CODE (tmp) != CONST_INT)
if (!CONST_INT_P (tmp))
return 0;
scale = INTVAL (tmp);
if ((unsigned HOST_WIDE_INT) scale > 3)
@@ -6050,7 +6050,7 @@ ix86_decompose_address (rtx addr, struct ix86_address *out)
/* Extract the integral value of scale. */
if (scale_rtx)
{
if (GET_CODE (scale_rtx) != CONST_INT)
if (!CONST_INT_P (scale_rtx))
return 0;
scale = INTVAL (scale_rtx);
}
@@ -6179,7 +6179,7 @@ ix86_find_base_term (rtx x)
return x;
term = XEXP (x, 0);
if (GET_CODE (term) == PLUS
&& (GET_CODE (XEXP (term, 1)) == CONST_INT
&& (CONST_INT_P (XEXP (term, 1))
|| GET_CODE (XEXP (term, 1)) == CONST_DOUBLE))
term = XEXP (term, 0);
if (GET_CODE (term) != UNSPEC
@@ -6239,7 +6239,7 @@ legitimate_constant_p (rtx x)
if (GET_CODE (x) == PLUS)
{
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
if (!CONST_INT_P (XEXP (x, 1)))
return false;
x = XEXP (x, 0);
}
@@ -6342,7 +6342,7 @@ legitimate_pic_operand_p (rtx x)
case CONST:
inner = XEXP (x, 0);
if (GET_CODE (inner) == PLUS
&& GET_CODE (XEXP (inner, 1)) == CONST_INT)
&& CONST_INT_P (XEXP (inner, 1)))
inner = XEXP (inner, 0);
/* Only some unspecs are valid as "constants". */
@@ -6393,7 +6393,7 @@ legitimate_pic_address_disp_p (rtx disp)
break;
op0 = XEXP (XEXP (disp, 0), 0);
op1 = XEXP (XEXP (disp, 0), 1);
if (GET_CODE (op1) != CONST_INT
if (!CONST_INT_P (op1)
|| INTVAL (op1) >= 16*1024*1024
|| INTVAL (op1) < -16*1024*1024)
break;
@@ -6437,7 +6437,7 @@ legitimate_pic_address_disp_p (rtx disp)
saw_plus = false;
if (GET_CODE (disp) == PLUS)
{
if (GET_CODE (XEXP (disp, 1)) != CONST_INT)
if (!CONST_INT_P (XEXP (disp, 1)))
return 0;
disp = XEXP (disp, 0);
saw_plus = true;
@@ -6665,7 +6665,7 @@ legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
if (GET_CODE (disp) != CONST
|| GET_CODE (XEXP (disp, 0)) != PLUS
|| GET_CODE (XEXP (XEXP (disp, 0), 0)) != UNSPEC
|| GET_CODE (XEXP (XEXP (disp, 0), 1)) != CONST_INT
|| !CONST_INT_P (XEXP (XEXP (disp, 0), 1))
|| (XINT (XEXP (XEXP (disp, 0), 0), 1) != UNSPEC_DTPOFF
&& XINT (XEXP (XEXP (disp, 0), 0), 1) != UNSPEC_NTPOFF))
{
@@ -6702,7 +6702,7 @@ legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
correct fix for crash to disable this test. */
}
else if (GET_CODE (disp) != LABEL_REF
&& GET_CODE (disp) != CONST_INT
&& !CONST_INT_P (disp)
&& (GET_CODE (disp) != CONST
|| !legitimate_constant_p (disp))
&& (GET_CODE (disp) != SYMBOL_REF
@@ -6878,7 +6878,7 @@ legitimize_pic_address (rtx orig, rtx reg)
}
else
{
if (GET_CODE (addr) == CONST_INT
if (CONST_INT_P (addr)
&& !x86_64_immediate_operand (addr, VOIDmode))
{
if (reg)
@@ -6909,7 +6909,7 @@ legitimize_pic_address (rtx orig, rtx reg)
/* Check first to see if this is a constant offset from a @GOTOFF
symbol reference. */
if (local_symbolic_operand (op0, Pmode)
&& GET_CODE (op1) == CONST_INT)
&& CONST_INT_P (op1))
{
if (!TARGET_64BIT)
{
@@ -6944,7 +6944,7 @@ legitimize_pic_address (rtx orig, rtx reg)
new = legitimize_pic_address (XEXP (addr, 1),
base == reg ? NULL_RTX : reg);
if (GET_CODE (new) == CONST_INT)
if (CONST_INT_P (new))
new = plus_constant (base, INTVAL (new));
else
{
@@ -7186,7 +7186,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
/* Canonicalize shifts by 0, 1, 2, 3 into multiply */
if (GET_CODE (x) == ASHIFT
&& GET_CODE (XEXP (x, 1)) == CONST_INT
&& CONST_INT_P (XEXP (x, 1))
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) < 4)
{
changed = 1;
@@ -7200,7 +7200,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
/* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
if (GET_CODE (XEXP (x, 0)) == ASHIFT
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
&& CONST_INT_P (XEXP (XEXP (x, 0), 1))
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (x, 0), 1)) < 4)
{
changed = 1;
@@ -7211,7 +7211,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
}
if (GET_CODE (XEXP (x, 1)) == ASHIFT
&& GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
&& CONST_INT_P (XEXP (XEXP (x, 1), 1))
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (x, 1), 1)) < 4)
{
changed = 1;
@@ -7254,12 +7254,12 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
rtx constant;
rtx other = NULL_RTX;
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
if (CONST_INT_P (XEXP (x, 1)))
{
constant = XEXP (x, 1);
other = XEXP (XEXP (XEXP (x, 0), 1), 1);
}
else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
else if (CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 1), 1)))
{
constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
other = XEXP (x, 1);
@@ -7293,8 +7293,8 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
}
if (changed
&& GET_CODE (XEXP (x, 1)) == REG
&& GET_CODE (XEXP (x, 0)) == REG)
&& REG_P (XEXP (x, 1))
&& REG_P (XEXP (x, 0)))
return x;
if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
@@ -7306,7 +7306,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
if (changed && legitimate_address_p (mode, x, FALSE))
return x;
if (GET_CODE (XEXP (x, 0)) == REG)
if (REG_P (XEXP (x, 0)))
{
rtx temp = gen_reg_rtx (Pmode);
rtx val = force_operand (XEXP (x, 1), temp);
@@ -7317,7 +7317,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
return x;
}
else if (GET_CODE (XEXP (x, 1)) == REG)
else if (REG_P (XEXP (x, 1)))
{
rtx temp = gen_reg_rtx (Pmode);
rtx val = force_operand (XEXP (x, 0), temp);
@@ -7392,7 +7392,7 @@ output_pic_addr_const (FILE *file, rtx x, int code)
case PLUS:
/* Some assemblers need integer constants to appear first. */
if (GET_CODE (XEXP (x, 0)) == CONST_INT)
if (CONST_INT_P (XEXP (x, 0)))
{
output_pic_addr_const (file, XEXP (x, 0), code);
putc ('+', file);
@@ -7400,7 +7400,7 @@ output_pic_addr_const (FILE *file, rtx x, int code)
}
else
{
gcc_assert (GET_CODE (XEXP (x, 1)) == CONST_INT);
gcc_assert (CONST_INT_P (XEXP (x, 1)));
output_pic_addr_const (file, XEXP (x, 1), code);
putc ('+', file);
output_pic_addr_const (file, XEXP (x, 0), code);
@@ -7509,7 +7509,7 @@ ix86_delegitimize_address (rtx orig_x)
/* This is the result, or NULL. */
rtx result = NULL_RTX;
if (GET_CODE (x) == MEM)
if (MEM_P (x))
x = XEXP (x, 0);
if (TARGET_64BIT)
@@ -7517,7 +7517,7 @@ ix86_delegitimize_address (rtx orig_x)
if (GET_CODE (x) != CONST
|| GET_CODE (XEXP (x, 0)) != UNSPEC
|| XINT (XEXP (x, 0), 1) != UNSPEC_GOTPCREL
|| GET_CODE (orig_x) != MEM)
|| !MEM_P (orig_x))
return orig_x;
return XVECEXP (XEXP (x, 0), 0, 0);
}
@@ -7526,7 +7526,7 @@ ix86_delegitimize_address (rtx orig_x)
|| GET_CODE (XEXP (x, 1)) != CONST)
return orig_x;
if (GET_CODE (XEXP (x, 0)) == REG
if (REG_P (XEXP (x, 0))
&& REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
/* %ebx + GOT/GOTOFF */
;
@@ -7534,15 +7534,15 @@ ix86_delegitimize_address (rtx orig_x)
{
/* %ebx + %reg * scale + GOT/GOTOFF */
reg_addend = XEXP (x, 0);
if (GET_CODE (XEXP (reg_addend, 0)) == REG
if (REG_P (XEXP (reg_addend, 0))
&& REGNO (XEXP (reg_addend, 0)) == PIC_OFFSET_TABLE_REGNUM)
reg_addend = XEXP (reg_addend, 1);
else if (GET_CODE (XEXP (reg_addend, 1)) == REG
else if (REG_P (XEXP (reg_addend, 1))
&& REGNO (XEXP (reg_addend, 1)) == PIC_OFFSET_TABLE_REGNUM)
reg_addend = XEXP (reg_addend, 0);
else
return orig_x;
if (GET_CODE (reg_addend) != REG
if (!REG_P (reg_addend)
&& GET_CODE (reg_addend) != MULT
&& GET_CODE (reg_addend) != ASHIFT)
return orig_x;
@@ -7552,19 +7552,19 @@ ix86_delegitimize_address (rtx orig_x)
x = XEXP (XEXP (x, 1), 0);
if (GET_CODE (x) == PLUS
&& GET_CODE (XEXP (x, 1)) == CONST_INT)
&& CONST_INT_P (XEXP (x, 1)))
{
const_addend = XEXP (x, 1);
x = XEXP (x, 0);
}
if (GET_CODE (x) == UNSPEC
&& ((XINT (x, 1) == UNSPEC_GOT && GET_CODE (orig_x) == MEM)
|| (XINT (x, 1) == UNSPEC_GOTOFF && GET_CODE (orig_x) != MEM)))
&& ((XINT (x, 1) == UNSPEC_GOT && MEM_P (orig_x))
|| (XINT (x, 1) == UNSPEC_GOTOFF && !MEM_P (orig_x))))
result = XVECEXP (x, 0, 0);
if (TARGET_MACHO && darwin_local_data_pic (x)
&& GET_CODE (orig_x) != MEM)
&& !MEM_P (orig_x))
result = XEXP (x, 0);
if (! result)
@@ -7862,7 +7862,7 @@ print_operand (FILE *file, rtx x, int code)
case ASM_INTEL:
/* Intel syntax. For absolute addresses, registers should not
be surrounded by braces. */
if (GET_CODE (x) != REG)
if (!REG_P (x))
{
putc ('[', file);
PRINT_OPERAND (file, x, 0);
@@ -7976,7 +7976,7 @@ print_operand (FILE *file, rtx x, int code)
break;
case 's':
if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
if (CONST_INT_P (x) || ! SHIFT_DOUBLE_OMITS_COUNT)
{
PRINT_OPERAND (file, x, 0);
putc (',', file);
@@ -8114,10 +8114,10 @@ print_operand (FILE *file, rtx x, int code)
}
}
if (GET_CODE (x) == REG)
if (REG_P (x))
print_reg (x, code, file);
else if (GET_CODE (x) == MEM)
else if (MEM_P (x))
{
/* No `byte ptr' prefix for call instructions. */
if (ASSEMBLER_DIALECT == ASM_INTEL && code != 'X' && code != 'P')
@@ -8150,7 +8150,7 @@ print_operand (FILE *file, rtx x, int code)
x = XEXP (x, 0);
/* Avoid (%rip) for call operands. */
if (CONSTANT_ADDRESS_P (x) && code == 'P'
&& GET_CODE (x) != CONST_INT)
&& !CONST_INT_P (x))
output_addr_const (file, x);
else if (this_is_asm_operands && ! address_operand (x, VOIDmode))
output_operand_lossage ("invalid constraints for operand");
@@ -8202,7 +8202,7 @@ print_operand (FILE *file, rtx x, int code)
if (code != 'P')
{
if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
if (CONST_INT_P (x) || GET_CODE (x) == CONST_DOUBLE)
{
if (ASSEMBLER_DIALECT == ASM_ATT)
putc ('$', file);
@@ -8216,7 +8216,7 @@ print_operand (FILE *file, rtx x, int code)
fputs ("OFFSET FLAT:", file);
}
}
if (GET_CODE (x) == CONST_INT)
if (CONST_INT_P (x))
fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
else if (flag_pic)
output_pic_addr_const (file, x, code);
@@ -8260,7 +8260,7 @@ print_operand_address (FILE *file, rtx addr)
{
/* Displacement only requires special attention. */
if (GET_CODE (disp) == CONST_INT)
if (CONST_INT_P (disp))
{
if (ASSEMBLER_DIALECT == ASM_INTEL && parts.seg == SEG_DEFAULT)
{
@@ -8280,7 +8280,7 @@ print_operand_address (FILE *file, rtx addr)
{
if (GET_CODE (disp) == CONST
&& GET_CODE (XEXP (disp, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
&& CONST_INT_P (XEXP (XEXP (disp, 0), 1)))
disp = XEXP (XEXP (disp, 0), 0);
if (GET_CODE (disp) == LABEL_REF
|| (GET_CODE (disp) == SYMBOL_REF
@@ -8323,7 +8323,7 @@ print_operand_address (FILE *file, rtx addr)
/* Pull out the offset of a symbol; print any symbol itself. */
if (GET_CODE (disp) == CONST
&& GET_CODE (XEXP (disp, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
&& CONST_INT_P (XEXP (XEXP (disp, 0), 1)))
{
offset = XEXP (XEXP (disp, 0), 1);
disp = gen_rtx_CONST (VOIDmode,
@@ -8334,7 +8334,7 @@ print_operand_address (FILE *file, rtx addr)
output_pic_addr_const (file, disp, 0);
else if (GET_CODE (disp) == LABEL_REF)
output_asm_label (disp);
else if (GET_CODE (disp) == CONST_INT)
else if (CONST_INT_P (disp))
offset = disp;
else
output_addr_const (file, disp);
@@ -8433,7 +8433,7 @@ split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
/* simplify_subreg refuse to split volatile memory addresses,
but we still have to handle it. */
if (GET_CODE (op) == MEM)
if (MEM_P (op))
{
lo_half[num] = adjust_address (op, SImode, 0);
hi_half[num] = adjust_address (op, SImode, 4);
@@ -8464,7 +8464,7 @@ split_ti (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
/* simplify_subreg refuse to split volatile memory addresses, but we
still have to handle it. */
if (GET_CODE (op) == MEM)
if (MEM_P (op))
{
lo_half[num] = adjust_address (op, DImode, 0);
hi_half[num] = adjust_address (op, DImode, 8);
@@ -8508,10 +8508,10 @@ output_387_binary_op (rtx insn, rtx *operands)
if (STACK_REG_P (operands[0])
&& ((REG_P (operands[1])
&& REGNO (operands[0]) == REGNO (operands[1])
&& (STACK_REG_P (operands[2]) || GET_CODE (operands[2]) == MEM))
&& (STACK_REG_P (operands[2]) || MEM_P (operands[2])))
|| (REG_P (operands[2])
&& REGNO (operands[0]) == REGNO (operands[2])
&& (STACK_REG_P (operands[1]) || GET_CODE (operands[1]) == MEM)))
&& (STACK_REG_P (operands[1]) || MEM_P (operands[1]))))
&& (STACK_TOP_P (operands[1]) || STACK_TOP_P (operands[2])))
; /* ok */
else
@@ -8584,7 +8584,7 @@ output_387_binary_op (rtx insn, rtx *operands)
/* know operands[0] == operands[1]. */
if (GET_CODE (operands[2]) == MEM)
if (MEM_P (operands[2]))
{
p = "%z2\t%2";
break;
@@ -8614,13 +8614,13 @@ output_387_binary_op (rtx insn, rtx *operands)
case MINUS:
case DIV:
if (GET_CODE (operands[1]) == MEM)
if (MEM_P (operands[1]))
{
p = "r%z1\t%1";
break;
}
if (GET_CODE (operands[2]) == MEM)
if (MEM_P (operands[2]))
{
p = "%z2\t%2";
break;
@@ -8859,7 +8859,7 @@ output_fix_trunc (rtx insn, rtx *operands, int fisttp)
output_asm_insn ("fld\t%y1", operands);
gcc_assert (STACK_TOP_P (operands[1]));
gcc_assert (GET_CODE (operands[0]) == MEM);
gcc_assert (MEM_P (operands[0]));
if (fisttp)
output_asm_insn ("fisttp%z0\t%0", operands);
@@ -9147,7 +9147,7 @@ ix86_expand_move (enum machine_mode mode, rtx operands[])
if (MACHOPIC_PURE)
{
rtx temp = ((reload_in_progress
|| ((op0 && GET_CODE (op0) == REG)
|| ((op0 && REG_P (op0))
&& mode == Pmode))
? op0 : gen_reg_rtx (Pmode));
op1 = machopic_indirect_data_reference (op1, temp);
@@ -9162,7 +9162,7 @@ ix86_expand_move (enum machine_mode mode, rtx operands[])
}
else
{
if (GET_CODE (op0) == MEM)
if (MEM_P (op0))
op1 = force_reg (Pmode, op1);
else
op1 = legitimize_address (op1, op1, Pmode);
@@ -9170,10 +9170,10 @@ ix86_expand_move (enum machine_mode mode, rtx operands[])
}
else
{
if (GET_CODE (op0) == MEM
if (MEM_P (op0)
&& (PUSH_ROUNDING (GET_MODE_SIZE (mode)) != GET_MODE_SIZE (mode)
|| !push_operand (op0, mode))
&& GET_CODE (op1) == MEM)
&& MEM_P (op1))
op1 = force_reg (mode, op1);
if (push_operand (op0, mode)
@@ -9408,7 +9408,7 @@ ix86_fixup_binary_operands (enum rtx_code code, enum machine_mode mode,
/* If the destination is memory, and we do not have matching source
operands, do things in registers. */
matching_memory = 0;
if (GET_CODE (dst) == MEM)
if (MEM_P (dst))
{
if (rtx_equal_p (dst, src1))
matching_memory = 1;
@@ -9420,7 +9420,7 @@ ix86_fixup_binary_operands (enum rtx_code code, enum machine_mode mode,
}
/* Both source operands cannot be in memory. */
if (GET_CODE (src1) == MEM && GET_CODE (src2) == MEM)
if (MEM_P (src1) && MEM_P (src2))
{
if (matching_memory != 2)
src2 = force_reg (mode, src2);
@@ -9431,7 +9431,7 @@ ix86_fixup_binary_operands (enum rtx_code code, enum machine_mode mode,
/* If the operation is not commutable, source 1 cannot be a constant
or non-matching memory. */
if ((CONSTANT_P (src1)
|| (!matching_memory && GET_CODE (src1) == MEM))
|| (!matching_memory && MEM_P (src1)))
&& GET_RTX_CLASS (code) != RTX_COMM_ARITH)
src1 = force_reg (mode, src1);
@@ -9495,20 +9495,20 @@ ix86_binary_operator_ok (enum rtx_code code,
rtx operands[3])
{
/* Both source operands cannot be in memory. */
if (GET_CODE (operands[1]) == MEM && GET_CODE (operands[2]) == MEM)
if (MEM_P (operands[1]) && MEM_P (operands[2]))
return 0;
/* If the operation is not commutable, source 1 cannot be a constant. */
if (CONSTANT_P (operands[1]) && GET_RTX_CLASS (code) != RTX_COMM_ARITH)
return 0;
/* If the destination is memory, we must have a matching source operand. */
if (GET_CODE (operands[0]) == MEM
if (MEM_P (operands[0])
&& ! (rtx_equal_p (operands[0], operands[1])
|| (GET_RTX_CLASS (code) == RTX_COMM_ARITH
&& rtx_equal_p (operands[0], operands[2]))))
return 0;
/* If the operation is not commutable and the source 1 is memory, we must
have a matching destination. */
if (GET_CODE (operands[1]) == MEM
if (MEM_P (operands[1])
&& GET_RTX_CLASS (code) != RTX_COMM_ARITH
&& ! rtx_equal_p (operands[0], operands[1]))
return 0;
@@ -9574,8 +9574,8 @@ ix86_unary_operator_ok (enum rtx_code code ATTRIBUTE_UNUSED,
rtx operands[2] ATTRIBUTE_UNUSED)
{
/* If one of operands is memory, source and destination must match. */
if ((GET_CODE (operands[0]) == MEM
|| GET_CODE (operands[1]) == MEM)
if ((MEM_P (operands[0])
|| MEM_P (operands[1]))
&& ! rtx_equal_p (operands[0], operands[1]))
return FALSE;
return TRUE;
@@ -10085,16 +10085,16 @@ ix86_prepare_fp_compare_args (enum rtx_code code, rtx *pop0, rtx *pop1)
into a register. */
if (standard_80387_constant_p (op0) == 0
|| (GET_CODE (op0) == MEM
|| (MEM_P (op0)
&& ! (standard_80387_constant_p (op1) == 0
|| GET_CODE (op1) == MEM)))
|| MEM_P (op1))))
{
rtx tmp;
tmp = op0, op0 = op1, op1 = tmp;
code = swap_condition (code);
}
if (GET_CODE (op0) != REG)
if (!REG_P (op0))
op0 = force_reg (op_mode, op0);
if (CONSTANT_P (op1))
@@ -10115,12 +10115,12 @@ ix86_prepare_fp_compare_args (enum rtx_code code, rtx *pop0, rtx *pop1)
/* Try to rearrange the comparison to make it cheaper. */
if (ix86_fp_comparison_cost (code)
> ix86_fp_comparison_cost (swap_condition (code))
&& (GET_CODE (op1) == REG || !no_new_pseudos))
&& (REG_P (op1) || !no_new_pseudos))
{
rtx tmp;
tmp = op0, op0 = op1, op1 = tmp;
code = swap_condition (code);
if (GET_CODE (op0) != REG)
if (!REG_P (op0))
op0 = force_reg (op_mode, op0);
}
@@ -10674,7 +10674,7 @@ ix86_expand_branch (enum rtx_code code, rtx label)
op1 is a constant and the low word is zero, then we can just
examine the high word. */
if (GET_CODE (hi[1]) == CONST_INT && lo[1] == const0_rtx)
if (CONST_INT_P (hi[1]) && lo[1] == const0_rtx)
switch (code)
{
case LT: case LTU: case GE: case GEU:
@@ -10948,7 +10948,7 @@ ix86_expand_carry_flag_compare (enum rtx_code code, rtx op0, rtx op1, rtx *pop)
/* Convert a>b into b<a or a>=b-1. */
case GTU:
case LEU:
if (GET_CODE (op1) == CONST_INT)
if (CONST_INT_P (op1))
{
op1 = gen_int_mode (INTVAL (op1) + 1, GET_MODE (op0));
/* Bail out on overflow. We still can swap operands but that
@@ -11025,8 +11025,8 @@ ix86_expand_int_movcc (rtx operands[])
if ((mode != HImode || TARGET_FAST_PREFIX)
&& (mode != (TARGET_64BIT ? TImode : DImode))
&& GET_CODE (operands[2]) == CONST_INT
&& GET_CODE (operands[3]) == CONST_INT)
&& CONST_INT_P (operands[2])
&& CONST_INT_P (operands[3]))
{
rtx out = operands[0];
HOST_WIDE_INT ct = INTVAL (operands[2]);
@@ -11201,7 +11201,7 @@ ix86_expand_int_movcc (rtx operands[])
compare_code = UNKNOWN;
if (GET_MODE_CLASS (GET_MODE (ix86_compare_op0)) == MODE_INT
&& GET_CODE (ix86_compare_op1) == CONST_INT)
&& CONST_INT_P (ix86_compare_op1))
{
if (ix86_compare_op1 == const0_rtx
&& (code == LT || code == GE))
@@ -11413,7 +11413,7 @@ ix86_expand_int_movcc (rtx operands[])
/* If one of the two operands is an interesting constant, load a
constant with the above and mask it in with a logical operation. */
if (GET_CODE (operands[2]) == CONST_INT)
if (CONST_INT_P (operands[2]))
{
var = operands[3];
if (INTVAL (operands[2]) == 0 && operands[3] != constm1_rtx)
@ -11423,7 +11423,7 @@ ix86_expand_int_movcc (rtx operands[])
else
return 0; /* FAIL */
}
else if (GET_CODE (operands[3]) == CONST_INT)
else if (CONST_INT_P (operands[3]))
{
var = operands[2];
if (INTVAL (operands[3]) == 0 && operands[2] != constm1_rtx)
@@ -12055,19 +12055,19 @@ ix86_split_to_parts (rtx operand, rtx *parts, enum machine_mode mode)
else
size = (GET_MODE_SIZE (mode) + 4) / 8;
gcc_assert (GET_CODE (operand) != REG || !MMX_REGNO_P (REGNO (operand)));
gcc_assert (!REG_P (operand) || !MMX_REGNO_P (REGNO (operand)));
gcc_assert (size >= 2 && size <= 3);
/* Optimize constant pool reference to immediates. This is used by fp
moves, that force all constants to memory to allow combining. */
if (GET_CODE (operand) == MEM && MEM_READONLY_P (operand))
if (MEM_P (operand) && MEM_READONLY_P (operand))
{
rtx tmp = maybe_get_pool_constant (operand);
if (tmp)
operand = tmp;
}
if (GET_CODE (operand) == MEM && !offsettable_memref_p (operand))
if (MEM_P (operand) && !offsettable_memref_p (operand))
{
/* The only non-offsetable memories we handle are pushes. */
int ok = push_operand (operand, VOIDmode);
@@ -12216,7 +12216,7 @@ ix86_split_long_move (rtx operands[])
/* Optimize constant pool reference to immediates. This is used by
fp moves, that force all constants to memory to allow combining. */
if (GET_CODE (operands[1]) == MEM
if (MEM_P (operands[1])
&& GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (operands[1], 0)))
operands[1] = get_pool_constant (XEXP (operands[1], 0));
@@ -12236,14 +12236,14 @@ ix86_split_long_move (rtx operands[])
if (push_operand (operands[0], VOIDmode))
push = 1;
else
gcc_assert (GET_CODE (operands[0]) != MEM
gcc_assert (!MEM_P (operands[0])
|| offsettable_memref_p (operands[0]));
nparts = ix86_split_to_parts (operands[1], part[1], GET_MODE (operands[0]));
ix86_split_to_parts (operands[0], part[0], GET_MODE (operands[0]));
/* When emitting push, take care for source operands on the stack. */
if (push && GET_CODE (operands[1]) == MEM
if (push && MEM_P (operands[1])
&& reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
{
if (nparts == 3)
@@ -12255,7 +12255,7 @@ ix86_split_long_move (rtx operands[])
/* We need to do copy in the right order in case an address register
of the source overlaps the destination. */
if (REG_P (part[0][0]) && GET_CODE (part[1][0]) == MEM)
if (REG_P (part[0][0]) && MEM_P (part[1][0]))
{
if (reg_overlap_mentioned_p (part[0][0], XEXP (part[1][0], 0)))
collisions++;
@@ -12390,25 +12390,25 @@ ix86_split_long_move (rtx operands[])
/* If optimizing for size, attempt to locally unCSE nonzero constants. */
if (optimize_size)
{
if (GET_CODE (operands[5]) == CONST_INT
if (CONST_INT_P (operands[5])
&& operands[5] != const0_rtx
&& REG_P (operands[2]))
{
if (GET_CODE (operands[6]) == CONST_INT
if (CONST_INT_P (operands[6])
&& INTVAL (operands[6]) == INTVAL (operands[5]))
operands[6] = operands[2];
if (nparts == 3
&& GET_CODE (operands[7]) == CONST_INT
&& CONST_INT_P (operands[7])
&& INTVAL (operands[7]) == INTVAL (operands[5]))
operands[7] = operands[2];
}
if (nparts == 3
&& GET_CODE (operands[6]) == CONST_INT
&& CONST_INT_P (operands[6])
&& operands[6] != const0_rtx
&& REG_P (operands[3])
&& GET_CODE (operands[7]) == CONST_INT
&& CONST_INT_P (operands[7])
&& INTVAL (operands[7]) == INTVAL (operands[6]))
operands[7] = operands[3];
}
@@ -12458,7 +12458,7 @@ ix86_split_ashl (rtx *operands, rtx scratch, enum machine_mode mode)
int count;
const int single_width = mode == DImode ? 32 : 64;
if (GET_CODE (operands[2]) == CONST_INT)
if (CONST_INT_P (operands[2]))
{
(mode == DImode ? split_di : split_ti) (operands, 2, low, high);
count = INTVAL (operands[2]) & (single_width * 2 - 1);
@@ -12585,7 +12585,7 @@ ix86_split_ashr (rtx *operands, rtx scratch, enum machine_mode mode)
int count;
const int single_width = mode == DImode ? 32 : 64;
if (GET_CODE (operands[2]) == CONST_INT)
if (CONST_INT_P (operands[2]))
{
(mode == DImode ? split_di : split_ti) (operands, 2, low, high);
count = INTVAL (operands[2]) & (single_width * 2 - 1);
@@ -12664,7 +12664,7 @@ ix86_split_lshr (rtx *operands, rtx scratch, enum machine_mode mode)
int count;
const int single_width = mode == DImode ? 32 : 64;
if (GET_CODE (operands[2]) == CONST_INT)
if (CONST_INT_P (operands[2]))
{
(mode == DImode ? split_di : split_ti) (operands, 2, low, high);
count = INTVAL (operands[2]) & (single_width * 2 - 1);
@@ -12725,7 +12725,7 @@ static void
predict_jump (int prob)
{
rtx insn = get_last_insn ();
gcc_assert (GET_CODE (insn) == JUMP_INSN);
gcc_assert (JUMP_P (insn));
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_BR_PROB,
GEN_INT (prob),
@@ -12785,7 +12785,7 @@ scale_counter (rtx countreg, int scale)
if (scale == 1)
return countreg;
if (GET_CODE (countreg) == CONST_INT)
if (CONST_INT_P (countreg))
return GEN_INT (INTVAL (countreg) / scale);
gcc_assert (REG_P (countreg));
@@ -12948,7 +12948,7 @@ expand_movmem_via_rep_mov (rtx destmem, rtx srcmem,
rtx countreg;
/* If the size is known, it is shorter to use rep movs. */
if (mode == QImode && GET_CODE (count) == CONST_INT
if (mode == QImode && CONST_INT_P (count)
&& !(INTVAL (count) & 3))
mode = SImode;
@@ -13015,7 +13015,7 @@ expand_movmem_epilogue (rtx destmem, rtx srcmem,
rtx destptr, rtx srcptr, rtx count, int max_size)
{
rtx src, dest;
if (GET_CODE (count) == CONST_INT)
if (CONST_INT_P (count))
{
HOST_WIDE_INT countval = INTVAL (count);
int offset = 0;
@@ -13168,7 +13168,7 @@ expand_setmem_epilogue (rtx destmem, rtx destptr, rtx value, rtx count, int max_
{
rtx dest;
if (GET_CODE (count) == CONST_INT)
if (CONST_INT_P (count))
{
HOST_WIDE_INT countval = INTVAL (count);
int offset = 0;
@@ -13551,15 +13551,15 @@ ix86_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp,
enum stringop_alg alg;
int dynamic_check;
if (GET_CODE (align_exp) == CONST_INT)
if (CONST_INT_P (align_exp))
align = INTVAL (align_exp);
/* i386 can do misaligned access on reasonably increased cost. */
if (GET_CODE (expected_align_exp) == CONST_INT
if (CONST_INT_P (expected_align_exp)
&& INTVAL (expected_align_exp) > align)
align = INTVAL (expected_align_exp);
if (GET_CODE (count_exp) == CONST_INT)
if (CONST_INT_P (count_exp))
count = expected_size = INTVAL (count_exp);
if (GET_CODE (expected_size_exp) == CONST_INT && count == 0)
if (CONST_INT_P (expected_size_exp) && count == 0)
expected_size = INTVAL (expected_size_exp);
/* Step 0: Decide on preferred algorithm, desired alignment and
@@ -13606,7 +13606,7 @@ ix86_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp,
/* Step 1: Prologue guard. */
/* Alignment code needs count to be in register. */
if (GET_CODE (count_exp) == CONST_INT && desired_align > align)
if (CONST_INT_P (count_exp) && desired_align > align)
{
enum machine_mode mode = SImode;
if (TARGET_64BIT && (count & ~0xffffffff))
@@ -13704,7 +13704,7 @@ ix86_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp,
break;
}
/* Adjust properly the offset of src and dest memory for aliasing. */
if (GET_CODE (count_exp) == CONST_INT)
if (CONST_INT_P (count_exp))
{
src = adjust_automodify_address_nv (src, BLKmode, srcreg,
(count / size_needed) * size_needed);
@@ -13762,7 +13762,7 @@ promote_duplicated_reg (enum machine_mode mode, rtx val)
gcc_assert (mode == SImode || mode == DImode);
if (val == const0_rtx)
return copy_to_mode_reg (mode, const0_rtx);
if (GET_CODE (val) == CONST_INT)
if (CONST_INT_P (val))
{
HOST_WIDE_INT v = INTVAL (val) & 255;
@@ -13861,15 +13861,15 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
bool force_loopy_epilogue = false;
int dynamic_check;
if (GET_CODE (align_exp) == CONST_INT)
if (CONST_INT_P (align_exp))
align = INTVAL (align_exp);
/* i386 can do misaligned access on reasonably increased cost. */
if (GET_CODE (expected_align_exp) == CONST_INT
if (CONST_INT_P (expected_align_exp)
&& INTVAL (expected_align_exp) > align)
align = INTVAL (expected_align_exp);
if (GET_CODE (count_exp) == CONST_INT)
if (CONST_INT_P (count_exp))
count = expected_size = INTVAL (count_exp);
if (GET_CODE (expected_size_exp) == CONST_INT && count == 0)
if (CONST_INT_P (expected_size_exp) && count == 0)
expected_size = INTVAL (expected_size_exp);
/* Step 0: Decide on preferred algorithm, desired alignment and
@@ -13914,7 +13914,7 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
/* Step 1: Prologue guard. */
/* Alignment code needs count to be in register. */
if (GET_CODE (count_exp) == CONST_INT && desired_align > align)
if (CONST_INT_P (count_exp) && desired_align > align)
{
enum machine_mode mode = SImode;
if (TARGET_64BIT && (count & ~0xffffffff))
@@ -13924,7 +13924,7 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
/* Do the cheap promotion to allow better CSE across the
main loop and epilogue (ie one load of the big constant in the
front of all code. */
if (GET_CODE (val_exp) == CONST_INT)
if (CONST_INT_P (val_exp))
promoted_val = promote_duplicated_reg_to_size (val_exp, size_needed,
desired_align, align);
/* Ensure that alignment prologue won't copy past end of block. */
@@ -14022,7 +14022,7 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
break;
}
/* Adjust properly the offset of src and dest memory for aliasing. */
if (GET_CODE (count_exp) == CONST_INT)
if (CONST_INT_P (count_exp))
dst = adjust_automodify_address_nv (dst, BLKmode, destreg,
(count / size_needed) * size_needed);
else
@@ -14076,7 +14076,7 @@ ix86_expand_strlen (rtx out, rtx src, rtx eoschar, rtx align)
if (TARGET_UNROLL_STRLEN && eoschar == const0_rtx && optimize > 1
&& !TARGET_INLINE_ALL_STRINGOPS
&& !optimize_size
&& (GET_CODE (align) != CONST_INT || INTVAL (align) < 4))
&& (!CONST_INT_P (align) || INTVAL (align) < 4))
return 0;
addr = force_reg (Pmode, XEXP (src, 0));
@@ -14161,7 +14161,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
rtx cmp;
align = 0;
if (GET_CODE (align_rtx) == CONST_INT)
if (CONST_INT_P (align_rtx))
align = INTVAL (align_rtx);
/* Loop to check 1..3 bytes for null to get an aligned pointer. */
@@ -14606,7 +14606,7 @@ ix86_attr_length_address_default (rtx insn)
extract_insn_cached (insn);
for (i = recog_data.n_operands - 1; i >= 0; --i)
if (GET_CODE (recog_data.operand[i]) == MEM)
if (MEM_P (recog_data.operand[i]))
{
return memory_address_length (XEXP (recog_data.operand[i], 0));
break;
@@ -14673,7 +14673,7 @@ ix86_flags_dependent (rtx insn, rtx dep_insn, enum attr_type insn_type)
else
return 0;
if (GET_CODE (set) != REG || REGNO (set) != FLAGS_REG)
if (!REG_P (set) || REGNO (set) != FLAGS_REG)
return 0;
/* This test is true if the dependent insn reads the flags but
@@ -14712,7 +14712,7 @@ ix86_agi_dependent (rtx insn, rtx dep_insn, enum attr_type insn_type)
int i;
extract_insn_cached (insn);
for (i = recog_data.n_operands - 1; i >= 0; --i)
if (GET_CODE (recog_data.operand[i]) == MEM)
if (MEM_P (recog_data.operand[i]))
{
addr = XEXP (recog_data.operand[i], 0);
goto found;
@@ -14775,7 +14775,7 @@ ix86_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
&& (set = single_set (dep_insn)) != NULL_RTX
&& (set2 = single_set (insn)) != NULL_RTX
&& rtx_equal_p (SET_DEST (set), SET_SRC (set2))
&& GET_CODE (SET_DEST (set2)) == MEM)
&& MEM_P (SET_DEST (set2)))
cost += 1;
/* Show ability of reorder buffer to hide latency of load by executing
@@ -18250,7 +18250,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
return false;
case ASHIFT:
if (GET_CODE (XEXP (x, 1)) == CONST_INT
if (CONST_INT_P (XEXP (x, 1))
&& (GET_MODE (XEXP (x, 0)) != DImode || TARGET_64BIT))
{
HOST_WIDE_INT value = INTVAL (XEXP (x, 1));
@@ -18274,7 +18274,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
case ROTATERT:
if (!TARGET_64BIT && GET_MODE (XEXP (x, 0)) == DImode)
{
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
if (CONST_INT_P (XEXP (x, 1)))
{
if (INTVAL (XEXP (x, 1)) > 32)
*total = ix86_cost->shift_const + COSTS_N_INSNS (2);
@@ -18291,7 +18291,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
}
else
{
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
if (CONST_INT_P (XEXP (x, 1)))
*total = ix86_cost->shift_const;
else
*total = ix86_cost->shift_var;
@@ -18309,7 +18309,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
rtx op0 = XEXP (x, 0);
rtx op1 = XEXP (x, 1);
int nbits;
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
if (CONST_INT_P (XEXP (x, 1)))
{
unsigned HOST_WIDE_INT value = INTVAL (XEXP (x, 1));
for (nbits = 0; value != 0; value &= value - 1)
@@ -18329,7 +18329,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
if (GET_CODE (op0) == GET_CODE (op1))
is_mulwiden = 1, op1 = XEXP (op1, 0);
else if (GET_CODE (op1) == CONST_INT)
else if (CONST_INT_P (op1))
{
if (GET_CODE (op0) == SIGN_EXTEND)
is_mulwiden = trunc_int_for_mode (INTVAL (op1), inner_mode)
@@ -18367,7 +18367,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
{
if (GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
&& GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
&& CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
&& CONSTANT_P (XEXP (x, 1)))
{
HOST_WIDE_INT val = INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1));
@@ -18382,7 +18382,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
}
}
else if (GET_CODE (XEXP (x, 0)) == MULT
&& GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
&& CONST_INT_P (XEXP (XEXP (x, 0), 1)))
{
HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
if (val == 2 || val == 4 || val == 8)
@@ -18444,7 +18444,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
case COMPARE:
if (GET_CODE (XEXP (x, 0)) == ZERO_EXTRACT
&& XEXP (XEXP (x, 0), 1) == const1_rtx
&& GET_CODE (XEXP (XEXP (x, 0), 2)) == CONST_INT
&& CONST_INT_P (XEXP (XEXP (x, 0), 2))
&& XEXP (x, 1) == const0_rtx)
{
/* This kind of construct is implemented using test[bwl].
@@ -18938,14 +18938,14 @@ min_insn_size (rtx insn)
if (GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
&& XINT (PATTERN (insn), 1) == UNSPECV_ALIGN)
return 0;
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
return 0;
/* Important case - calls are always 5 bytes.
It is common to have many calls in the row. */
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
&& symbolic_reference_mentioned_p (PATTERN (insn))
&& !SIBLING_CALL_P (insn))
return 5;
@@ -18955,7 +18955,7 @@ min_insn_size (rtx insn)
/* For normal instructions we may rely on the sizes of addresses
and the presence of symbol to require 4 bytes of encoding.
This is not the case for jumps where references are PC relative. */
if (GET_CODE (insn) != JUMP_INSN)
if (!JUMP_P (insn))
{
l = get_attr_length_address (insn);
if (l < 4 && symbolic_reference_mentioned_p (PATTERN (insn)))
@@ -18994,10 +18994,10 @@ ix86_avoid_jump_misspredicts (void)
if (dump_file)
fprintf(dump_file, "Insn %i estimated to %i bytes\n",
INSN_UID (insn), min_insn_size (insn));
if ((GET_CODE (insn) == JUMP_INSN
if ((JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
|| GET_CODE (insn) == CALL_INSN)
|| CALL_P (insn))
njumps++;
else
continue;
@@ -19005,10 +19005,10 @@ ix86_avoid_jump_misspredicts (void)
while (njumps > 3)
{
start = NEXT_INSN (start);
if ((GET_CODE (start) == JUMP_INSN
if ((JUMP_P (start)
&& GET_CODE (PATTERN (start)) != ADDR_VEC
&& GET_CODE (PATTERN (start)) != ADDR_DIFF_VEC)
|| GET_CODE (start) == CALL_INSN)
|| CALL_P (start))
njumps--, isjump = 1;
else
isjump = 0;
@@ -19048,13 +19048,13 @@ ix86_pad_returns (void)
rtx prev;
bool replace = false;
if (GET_CODE (ret) != JUMP_INSN || GET_CODE (PATTERN (ret)) != RETURN
if (!JUMP_P (ret) || GET_CODE (PATTERN (ret)) != RETURN
|| !maybe_hot_bb_p (bb))
continue;
for (prev = PREV_INSN (ret); prev; prev = PREV_INSN (prev))
if (active_insn_p (prev) || GET_CODE (prev) == CODE_LABEL)
if (active_insn_p (prev) || LABEL_P (prev))
break;
if (prev && GET_CODE (prev) == CODE_LABEL)
if (prev && LABEL_P (prev))
{
edge e;
edge_iterator ei;
@@ -19068,8 +19068,8 @@ ix86_pad_returns (void)
{
prev = prev_active_insn (ret);
if (prev
&& ((GET_CODE (prev) == JUMP_INSN && any_condjump_p (prev))
|| GET_CODE (prev) == CALL_INSN))
&& ((JUMP_P (prev) && any_condjump_p (prev))
|| CALL_P (prev)))
replace = true;
/* Empty functions get branch mispredict even when the jump destination
is not visible to us. */

gcc/config/i386/i386.md (diff suppressed because it is too large)

gcc/config/i386/mmx.md

@@ -68,7 +68,7 @@
(match_operand:MMXMODEI 1 "vector_move_operand"
"Cr ,m,C ,*ym,*y,Y ,*y,C,xm,x,x,r"))]
"TARGET_64BIT && TARGET_MMX
&& (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
movq\t{%1, %0|%0, %1}
movq\t{%1, %0|%0, %1}
@@ -92,7 +92,7 @@
(match_operand:MMXMODEI 1 "vector_move_operand"
"C ,*ym,*y,*Y,*y,C ,*Ym,*Y,C ,*x,m ,*x,irm,r"))]
"TARGET_MMX
&& (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
pxor\t%0, %0
movq\t{%1, %0|%0, %1}
@@ -127,7 +127,7 @@
(match_operand:V2SF 1 "vector_move_operand"
"Cr ,m ,C ,*ym,*y,Y ,*y,C,x,m,x,x,r"))]
"TARGET_64BIT && TARGET_MMX
&& (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
movq\t{%1, %0|%0, %1}
movq\t{%1, %0|%0, %1}
@@ -152,7 +152,7 @@
(match_operand:V2SF 1 "vector_move_operand"
"C ,*ym,*y,*Y,*y,C ,*x,m ,*x,irm,r"))]
"TARGET_MMX
&& (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
pxor\t%0, %0
movq\t{%1, %0|%0, %1}
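
In the insn conditions above, the rewrite also applies De Morgan's law:
(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM) becomes
!(MEM_P (operands[0]) && MEM_P (operands[1])), i.e. at most one operand may
be a MEM. A hypothetical standalone check (not GCC code) that the two forms
agree on all operand combinations:

    #include <assert.h>

    int main (void)
    {
      /* m0 and m1 stand in for "operands[0] is a MEM" and
         "operands[1] is a MEM".  */
      for (int m0 = 0; m0 <= 1; m0++)
        for (int m1 = 0; m1 <= 1; m1++)
          assert ((!m0 || !m1) == !(m0 && m1));
      return 0;
    }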

gcc/config/i386/predicates.md

@@ -146,7 +146,7 @@
if (ix86_cmodel == CM_LARGE)
return 0;
if (GET_CODE (op2) != CONST_INT)
if (!CONST_INT_P (op2))
return 0;
offset = trunc_int_for_mode (INTVAL (op2), DImode);
switch (GET_CODE (op1))
@@ -266,7 +266,7 @@
if ((ix86_cmodel == CM_SMALL
|| (ix86_cmodel == CM_MEDIUM
&& !SYMBOL_REF_FAR_ADDR_P (op1)))
&& GET_CODE (op2) == CONST_INT
&& CONST_INT_P (op2)
&& trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
&& trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
return 1;
@@ -280,7 +280,7 @@
/* These conditions are similar to SYMBOL_REF ones, just the
constraints for code models differ. */
if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
&& GET_CODE (op2) == CONST_INT
&& CONST_INT_P (op2)
&& trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
&& trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
return 1;
@@ -340,7 +340,7 @@
if (TARGET_64BIT && GET_CODE (op) == CONST)
{
op = XEXP (op, 0);
if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
op = XEXP (op, 0);
if (GET_CODE (op) == UNSPEC
&& (XINT (op, 1) == UNSPEC_GOTOFF
@@ -380,7 +380,7 @@
|| XINT (op, 1) == UNSPEC_GOTPCREL)))
return 1;
if (GET_CODE (op) != PLUS
|| GET_CODE (XEXP (op, 1)) != CONST_INT)
|| !CONST_INT_P (XEXP (op, 1)))
return 0;
op = XEXP (op, 0);
@@ -423,7 +423,7 @@
if (GET_CODE (op) == UNSPEC)
return 1;
if (GET_CODE (op) != PLUS
|| GET_CODE (XEXP (op, 1)) != CONST_INT)
|| !CONST_INT_P (XEXP (op, 1)))
return 0;
op = XEXP (op, 0);
if (GET_CODE (op) == UNSPEC)
@@ -438,7 +438,7 @@
{
if (GET_CODE (op) == CONST
&& GET_CODE (XEXP (op, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
&& CONST_INT_P (XEXP (XEXP (op, 0), 1)))
op = XEXP (XEXP (op, 0), 0);
if (GET_CODE (op) == LABEL_REF)
@@ -784,7 +784,7 @@
}
if (parts.disp)
{
if (GET_CODE (parts.disp) != CONST_INT
if (!CONST_INT_P (parts.disp)
|| (INTVAL (parts.disp) & 3) != 0)
return 0;
}
@@ -911,7 +911,7 @@
enum machine_mode inmode = GET_MODE (XEXP (op, 0));
enum rtx_code code = GET_CODE (op);
if (GET_CODE (XEXP (op, 0)) != REG
if (!REG_P (XEXP (op, 0))
|| REGNO (XEXP (op, 0)) != FLAGS_REG
|| XEXP (op, 1) != const0_rtx)
return 0;