[Patch 1/4] Simplify the representation of CCMP patterns by using

2016-01-19  Wilco Dijkstra  <wdijkstr@arm.com>

    gcc/
        * target.def (gen_ccmp_first): Update documentation.
        (gen_ccmp_next): Likewise.
        * doc/tm.texi (gen_ccmp_first): Update documentation.
        (gen_ccmp_next): Likewise.
        * ccmp.c (expand_ccmp_expr): Extract cmp_code from return value of
        expand_ccmp_expr_1.  Improve comments.
        * config/aarch64/aarch64.md (ccmp_and): Use if_then_else for ccmp.
        (ccmp_ior<mode>): Remove pattern.
        (cmp<mode>): Remove expand.
        (cmp): Globalize pattern.
        (cstorecc4): Use cc_register.
        (mov<mode>cc): Remove ccmp_cc_register check.
        * config/aarch64/aarch64.c (aarch64_get_condition_code_1):
        Simplify after removal of CC_DNE/* modes.
        (aarch64_ccmp_mode_to_code): Remove.
        (aarch64_print_operand): Remove 'K' case.  Merge 'm' and 'M' cases.
        In 'k' case use integer as condition.
        (aarch64_nzcv_codes): Remove inverted cases.
        (aarch64_code_to_ccmode): Remove.
        (aarch64_gen_ccmp_first): Use cmp pattern directly.  Return the correct
        comparison with CC register to be used in following CCMP/branch/CSEL.
        (aarch64_gen_ccmp_next): Use previous comparison and mode in CCMP
        pattern.  Return the comparison with CC register.  Invert conditions
        when bitcode is OR.
        * config/aarch64/aarch64-modes.def: Remove CC_DNE/* modes.
        * config/aarch64/predicates.md (ccmp_cc_register): Remove.

From-SVN: r232561
Wilco Dijkstra 2016-01-19 13:59:56 +00:00 committed by Jiong Wang
parent 756f4e9049
commit c8012fbce9
8 changed files with 158 additions and 366 deletions
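
For context only (not part of this commit): the machinery changed here is what turns a composite condition into a conditional-compare sequence instead of branches. A minimal sketch, assuming an AArch64 compiler at -O2; register numbers and the exact nzcv immediate are illustrative:

    /* ccmp-and.c -- illustration only.  */
    int
    f (int a, int b)
    {
      return a == 0 && b > 3;
    }

    /* Plausible output (schematic):
         cmp   w0, 0           first compare (the cmp<mode> pattern)
         ccmp  w1, 3, 4, eq    compare b with 3 only when a == 0; otherwise
                               set the flags to nzcv = 4 (Z set), so GT is false
         cset  w0, gt          cstorecc4 turns the CC result into 0/1  */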

gcc/ChangeLog

@ -1,3 +1,32 @@
2016-01-19 Wilco Dijkstra <wdijkstr@arm.com>
* target.def (gen_ccmp_first): Update documentation.
(gen_ccmp_next): Likewise.
* doc/tm.texi (gen_ccmp_first): Update documentation.
(gen_ccmp_next): Likewise.
* ccmp.c (expand_ccmp_expr): Extract cmp_code from return value of
expand_ccmp_expr_1. Improve comments.
* config/aarch64/aarch64.md (ccmp_and): Use if_then_else for ccmp.
(ccmp_ior<mode>): Remove pattern.
(cmp<mode>): Remove expand.
(cmp): Globalize pattern.
(cstorecc4): Use cc_register.
(mov<mode>cc): Remove ccmp_cc_register check.
* config/aarch64/aarch64.c (aarch64_get_condition_code_1):
Simplify after removal of CC_DNE/* modes.
(aarch64_ccmp_mode_to_code): Remove.
(aarch64_print_operand): Remove 'K' case. Merge 'm' and 'M' cases.
In 'k' case use integer as condition.
(aarch64_nzcv_codes): Remove inverted cases.
(aarch64_code_to_ccmode): Remove.
(aarch64_gen_ccmp_first): Use cmp pattern directly. Return the correct
comparison with CC register to be used in folowing CCMP/branch/CSEL.
(aarch64_gen_ccmp_next): Use previous comparison and mode in CCMP
pattern. Return the comparison with CC register. Invert conditions
when bitcode is OR.
* config/aarch64/aarch64-modes.def: Remove CC_DNE/* modes.
* config/aarch64/predicates.md (ccmp_cc_register): Remove.
2016-01-19 Jan Hubicka <hubicka@ucw.cz>
* cgraphunit.c (cgraph_node::reset): Clear thunk info and

gcc/ccmp.c

@ -49,6 +49,10 @@ along with GCC; see the file COPYING3. If not see
- gen_ccmp_first expands the first compare in CCMP.
- gen_ccmp_next expands the following compares.
Both hooks return a comparison with the CC register that is equivalent
to the value of the gimple comparison. This is used by the next CCMP
and in the final conditional store.
* We use cstorecc4 pattern to convert the CCmode intermediate to
the integer mode result that expand_normal is expecting.
@ -114,10 +118,12 @@ ccmp_candidate_p (gimple *g)
return false;
}
/* PREV is the CC flag from precvious compares. The function expands the
next compare based on G which ops previous compare with CODE.
/* PREV is a comparison with the CC register which represents the
result of the previous CMP or CCMP. The function expands the
next compare based on G which is ANDed/ORed with the previous
compare depending on CODE.
PREP_SEQ returns all insns to prepare opearands for compare.
GEN_SEQ returnss all compare insns. */
GEN_SEQ returns all compare insns. */
static rtx
expand_ccmp_next (gimple *g, enum tree_code code, rtx prev,
rtx *prep_seq, rtx *gen_seq)
@ -210,7 +216,7 @@ expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
return NULL_RTX;
}
/* Main entry to expand conditional compare statement G.
/* Main entry to expand conditional compare statement G.
Return NULL_RTX if G is not a legal candidate or expand fail.
Otherwise return the target. */
rtx
@ -233,9 +239,10 @@ expand_ccmp_expr (gimple *g)
enum insn_code icode;
enum machine_mode cc_mode = CCmode;
tree lhs = gimple_assign_lhs (g);
rtx_code cmp_code = GET_CODE (tmp);
#ifdef SELECT_CC_MODE
cc_mode = SELECT_CC_MODE (NE, tmp, const0_rtx);
cc_mode = SELECT_CC_MODE (cmp_code, XEXP (tmp, 0), const0_rtx);
#endif
icode = optab_handler (cstore_optab, cc_mode);
if (icode != CODE_FOR_nothing)
@ -246,8 +253,8 @@ expand_ccmp_expr (gimple *g)
emit_insn (prep_seq);
emit_insn (gen_seq);
tmp = emit_cstore (target, icode, NE, cc_mode, cc_mode,
0, tmp, const0_rtx, 1, mode);
tmp = emit_cstore (target, icode, cmp_code, cc_mode, cc_mode,
0, XEXP (tmp, 0), const0_rtx, 1, mode);
if (tmp)
return tmp;
}
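
To make the new contract concrete (a sketch, not code from this commit): with the AArch64 hooks, the tmp returned by expand_ccmp_expr_1 for the running a == 0 && b > 3 example would be a comparison against the CC register, and the caller only needs its code and its first operand (the local cc_reg below exists purely for illustration):

    /* Sketch, assuming the AArch64 backend:
         tmp == (gt (reg:CC cc) (const_int 0))   */
    rtx_code cmp_code = GET_CODE (tmp);   /* GT */
    rtx cc_reg = XEXP (tmp, 0);           /* the CC register */
    /* cmp_code and cc_reg are what SELECT_CC_MODE and emit_cstore
       receive; the cstorecc4 expander then emits the final cset.  */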

gcc/config/aarch64/aarch64-modes.def

@ -25,16 +25,6 @@ CC_MODE (CC_ZESWP); /* zero-extend LHS (but swap to make it RHS). */
CC_MODE (CC_SESWP); /* sign-extend LHS (but swap to make it RHS). */
CC_MODE (CC_NZ); /* Only N and Z bits of condition flags are valid. */
CC_MODE (CC_Z); /* Only Z bit of condition flags is valid. */
CC_MODE (CC_DNE);
CC_MODE (CC_DEQ);
CC_MODE (CC_DLE);
CC_MODE (CC_DLT);
CC_MODE (CC_DGE);
CC_MODE (CC_DGT);
CC_MODE (CC_DLEU);
CC_MODE (CC_DLTU);
CC_MODE (CC_DGEU);
CC_MODE (CC_DGTU);
/* Half-precision floating point for __fp16. */
FLOAT_MODE (HF, 2, 0);

gcc/config/aarch64/aarch64.c

@ -4205,7 +4205,6 @@ aarch64_get_condition_code (rtx x)
static int
aarch64_get_condition_code_1 (enum machine_mode mode, enum rtx_code comp_code)
{
int ne = -1, eq = -1;
switch (mode)
{
case CCFPmode:
@ -4228,56 +4227,6 @@ aarch64_get_condition_code_1 (enum machine_mode mode, enum rtx_code comp_code)
}
break;
case CC_DNEmode:
ne = AARCH64_NE;
eq = AARCH64_EQ;
break;
case CC_DEQmode:
ne = AARCH64_EQ;
eq = AARCH64_NE;
break;
case CC_DGEmode:
ne = AARCH64_GE;
eq = AARCH64_LT;
break;
case CC_DLTmode:
ne = AARCH64_LT;
eq = AARCH64_GE;
break;
case CC_DGTmode:
ne = AARCH64_GT;
eq = AARCH64_LE;
break;
case CC_DLEmode:
ne = AARCH64_LE;
eq = AARCH64_GT;
break;
case CC_DGEUmode:
ne = AARCH64_CS;
eq = AARCH64_CC;
break;
case CC_DLTUmode:
ne = AARCH64_CC;
eq = AARCH64_CS;
break;
case CC_DGTUmode:
ne = AARCH64_HI;
eq = AARCH64_LS;
break;
case CC_DLEUmode:
ne = AARCH64_LS;
eq = AARCH64_HI;
break;
case CCmode:
switch (comp_code)
{
@ -4339,12 +4288,6 @@ aarch64_get_condition_code_1 (enum machine_mode mode, enum rtx_code comp_code)
break;
}
if (comp_code == NE)
return ne;
if (comp_code == EQ)
return eq;
return -1;
}
@ -4385,69 +4328,27 @@ aarch64_const_vec_all_same_int_p (rtx x, HOST_WIDE_INT val)
#define AARCH64_CC_Z (1 << 2)
#define AARCH64_CC_N (1 << 3)
/* N Z C V flags for ccmp. The first code is for AND op and the other
is for IOR op. Indexed by AARCH64_COND_CODE. */
static const int aarch64_nzcv_codes[][2] =
/* N Z C V flags for ccmp. Indexed by AARCH64_COND_CODE. */
static const int aarch64_nzcv_codes[] =
{
{AARCH64_CC_Z, 0}, /* EQ, Z == 1. */
{0, AARCH64_CC_Z}, /* NE, Z == 0. */
{AARCH64_CC_C, 0}, /* CS, C == 1. */
{0, AARCH64_CC_C}, /* CC, C == 0. */
{AARCH64_CC_N, 0}, /* MI, N == 1. */
{0, AARCH64_CC_N}, /* PL, N == 0. */
{AARCH64_CC_V, 0}, /* VS, V == 1. */
{0, AARCH64_CC_V}, /* VC, V == 0. */
{AARCH64_CC_C, 0}, /* HI, C ==1 && Z == 0. */
{0, AARCH64_CC_C}, /* LS, !(C == 1 && Z == 0). */
{0, AARCH64_CC_V}, /* GE, N == V. */
{AARCH64_CC_V, 0}, /* LT, N != V. */
{0, AARCH64_CC_Z}, /* GT, Z == 0 && N == V. */
{AARCH64_CC_Z, 0}, /* LE, !(Z == 0 && N == V). */
{0, 0}, /* AL, Any. */
{0, 0}, /* NV, Any. */
0, /* EQ, Z == 1. */
AARCH64_CC_Z, /* NE, Z == 0. */
0, /* CS, C == 1. */
AARCH64_CC_C, /* CC, C == 0. */
0, /* MI, N == 1. */
AARCH64_CC_N, /* PL, N == 0. */
0, /* VS, V == 1. */
AARCH64_CC_V, /* VC, V == 0. */
0, /* HI, C ==1 && Z == 0. */
AARCH64_CC_C, /* LS, !(C == 1 && Z == 0). */
AARCH64_CC_V, /* GE, N == V. */
0, /* LT, N != V. */
AARCH64_CC_Z, /* GT, Z == 0 && N == V. */
0, /* LE, !(Z == 0 && N == V). */
0, /* AL, Any. */
0 /* NV, Any. */
};
int
aarch64_ccmp_mode_to_code (enum machine_mode mode)
{
switch (mode)
{
case CC_DNEmode:
return NE;
case CC_DEQmode:
return EQ;
case CC_DLEmode:
return LE;
case CC_DGTmode:
return GT;
case CC_DLTmode:
return LT;
case CC_DGEmode:
return GE;
case CC_DLEUmode:
return LEU;
case CC_DGTUmode:
return GTU;
case CC_DLTUmode:
return LTU;
case CC_DGEUmode:
return GEU;
default:
gcc_unreachable ();
}
}
static void
aarch64_print_operand (FILE *f, rtx x, int code)
{
@ -4546,14 +4447,19 @@ aarch64_print_operand (FILE *f, rtx x, int code)
asm_fprintf (f, "%s", reg_names [REGNO (x) + 1]);
break;
case 'M':
case 'm':
{
int cond_code;
/* Print a condition (eq, ne, etc). */
/* Print a condition (eq, ne, etc) or its inverse. */
/* CONST_TRUE_RTX means always -- that's the default. */
if (x == const_true_rtx)
return;
/* CONST_TRUE_RTX means al/nv (al is the default, don't print it). */
if (x == const_true_rtx)
{
if (code == 'M')
fputs ("nv", f);
return;
}
if (!COMPARISON_P (x))
{
@ -4563,31 +4469,9 @@ aarch64_print_operand (FILE *f, rtx x, int code)
cond_code = aarch64_get_condition_code (x);
gcc_assert (cond_code >= 0);
fputs (aarch64_condition_codes[cond_code], f);
}
break;
case 'M':
{
int cond_code;
/* Print the inverse of a condition (eq <-> ne, etc). */
/* CONST_TRUE_RTX means never -- that's the default. */
if (x == const_true_rtx)
{
fputs ("nv", f);
return;
}
if (!COMPARISON_P (x))
{
output_operand_lossage ("invalid operand for '%%%c'", code);
return;
}
cond_code = aarch64_get_condition_code (x);
gcc_assert (cond_code >= 0);
fputs (aarch64_condition_codes[AARCH64_INVERSE_CONDITION_CODE
(cond_code)], f);
if (code == 'M')
cond_code = AARCH64_INVERSE_CONDITION_CODE (cond_code);
fputs (aarch64_condition_codes[cond_code], f);
}
break;
@ -4828,37 +4712,20 @@ aarch64_print_operand (FILE *f, rtx x, int code)
output_addr_const (asm_out_file, x);
break;
case 'K':
{
int cond_code;
/* Print nzcv. */
if (!COMPARISON_P (x))
{
output_operand_lossage ("invalid operand for '%%%c'", code);
return;
}
cond_code = aarch64_get_condition_code_1 (CCmode, GET_CODE (x));
gcc_assert (cond_code >= 0);
asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code][0]);
}
break;
case 'k':
{
int cond_code;
HOST_WIDE_INT cond_code;
/* Print nzcv. */
if (!COMPARISON_P (x))
if (!CONST_INT_P (x))
{
output_operand_lossage ("invalid operand for '%%%c'", code);
return;
}
cond_code = aarch64_get_condition_code_1 (CCmode, GET_CODE (x));
gcc_assert (cond_code >= 0);
asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code][1]);
cond_code = INTVAL (x);
gcc_assert (cond_code >= 0 && cond_code <= AARCH64_NV);
asm_fprintf (f, "%d", aarch64_nzcv_codes[cond_code]);
}
break;
@ -13066,60 +12933,16 @@ aarch64_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
return default_use_by_pieces_infrastructure_p (size, align, op, speed_p);
}
static enum machine_mode
aarch64_code_to_ccmode (enum rtx_code code)
{
switch (code)
{
case NE:
return CC_DNEmode;
case EQ:
return CC_DEQmode;
case LE:
return CC_DLEmode;
case LT:
return CC_DLTmode;
case GE:
return CC_DGEmode;
case GT:
return CC_DGTmode;
case LEU:
return CC_DLEUmode;
case LTU:
return CC_DLTUmode;
case GEU:
return CC_DGEUmode;
case GTU:
return CC_DGTUmode;
default:
return CCmode;
}
}
static rtx
aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq,
int code, tree treeop0, tree treeop1)
{
enum machine_mode op_mode, cmp_mode, cc_mode;
rtx op0, op1, cmp, target;
machine_mode op_mode, cmp_mode, cc_mode = CCmode;
rtx op0, op1;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));
enum insn_code icode;
insn_code icode;
struct expand_operand ops[4];
cc_mode = aarch64_code_to_ccmode ((enum rtx_code) code);
if (cc_mode == CCmode)
return NULL_RTX;
start_sequence ();
expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
@ -13146,8 +12969,8 @@ aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq,
return NULL_RTX;
}
op0 = prepare_operand (icode, op0, 2, op_mode, cmp_mode, unsignedp);
op1 = prepare_operand (icode, op1, 3, op_mode, cmp_mode, unsignedp);
op0 = prepare_operand (icode, op0, 0, op_mode, cmp_mode, unsignedp);
op1 = prepare_operand (icode, op1, 1, op_mode, cmp_mode, unsignedp);
if (!op0 || !op1)
{
end_sequence ();
@ -13156,16 +12979,11 @@ aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq,
*prep_seq = get_insns ();
end_sequence ();
cmp = gen_rtx_fmt_ee ((enum rtx_code) code, cmp_mode, op0, op1);
target = gen_rtx_REG (CCmode, CC_REGNUM);
create_output_operand (&ops[0], target, CCmode);
create_fixed_operand (&ops[1], cmp);
create_fixed_operand (&ops[2], op0);
create_fixed_operand (&ops[3], op1);
create_fixed_operand (&ops[0], op0);
create_fixed_operand (&ops[1], op1);
start_sequence ();
if (!maybe_expand_insn (icode, 4, ops))
if (!maybe_expand_insn (icode, 2, ops))
{
end_sequence ();
return NULL_RTX;
@ -13173,22 +12991,20 @@ aarch64_gen_ccmp_first (rtx *prep_seq, rtx *gen_seq,
*gen_seq = get_insns ();
end_sequence ();
return gen_rtx_REG (cc_mode, CC_REGNUM);
return gen_rtx_fmt_ee ((rtx_code) code, cc_mode,
gen_rtx_REG (cc_mode, CC_REGNUM), const0_rtx);
}
static rtx
aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
tree treeop0, tree treeop1, int bit_code)
{
rtx op0, op1, cmp0, cmp1, target;
enum machine_mode op_mode, cmp_mode, cc_mode;
rtx op0, op1, target;
machine_mode op_mode, cmp_mode, cc_mode = CCmode;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));
enum insn_code icode = CODE_FOR_ccmp_andsi;
insn_code icode;
struct expand_operand ops[6];
cc_mode = aarch64_code_to_ccmode ((enum rtx_code) cmp_code);
if (cc_mode == CCmode)
return NULL_RTX;
int aarch64_cond;
push_to_sequence ((rtx_insn*) *prep_seq);
expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
@ -13203,14 +13019,12 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
case HImode:
case SImode:
cmp_mode = SImode;
icode = (enum rtx_code) bit_code == AND ? CODE_FOR_ccmp_andsi
: CODE_FOR_ccmp_iorsi;
icode = CODE_FOR_ccmpsi;
break;
case DImode:
cmp_mode = DImode;
icode = (enum rtx_code) bit_code == AND ? CODE_FOR_ccmp_anddi
: CODE_FOR_ccmp_iordi;
icode = CODE_FOR_ccmpdi;
break;
default:
@ -13229,15 +13043,22 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
end_sequence ();
target = gen_rtx_REG (cc_mode, CC_REGNUM);
cmp1 = gen_rtx_fmt_ee ((enum rtx_code) cmp_code, cmp_mode, op0, op1);
cmp0 = gen_rtx_fmt_ee (NE, cmp_mode, prev, const0_rtx);
aarch64_cond = aarch64_get_condition_code_1 (cc_mode, (rtx_code) cmp_code);
create_fixed_operand (&ops[0], prev);
if (bit_code != AND)
{
prev = gen_rtx_fmt_ee (REVERSE_CONDITION (GET_CODE (prev),
GET_MODE (XEXP (prev, 0))),
VOIDmode, XEXP (prev, 0), const0_rtx);
aarch64_cond = AARCH64_INVERSE_CONDITION_CODE (aarch64_cond);
}
create_fixed_operand (&ops[0], XEXP (prev, 0));
create_fixed_operand (&ops[1], target);
create_fixed_operand (&ops[2], op0);
create_fixed_operand (&ops[3], op1);
create_fixed_operand (&ops[4], cmp0);
create_fixed_operand (&ops[5], cmp1);
create_fixed_operand (&ops[4], prev);
create_fixed_operand (&ops[5], GEN_INT (aarch64_cond));
push_to_sequence ((rtx_insn*) *gen_seq);
if (!maybe_expand_insn (icode, 6, ops))
@ -13249,7 +13070,7 @@ aarch64_gen_ccmp_next (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code,
*gen_seq = get_insns ();
end_sequence ();
return target;
return gen_rtx_fmt_ee ((rtx_code) cmp_code, VOIDmode, target, const0_rtx);
}
#undef TARGET_GEN_CCMP_FIRST
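
As an illustration of the IOR handling in aarch64_gen_ccmp_next above (example only, not part of the commit): when the compares are joined by ||, the previous condition and the nzcv value are both inverted, so the single ccmp pattern covers AND and IOR chains. Assuming AArch64 at -O2, the output would look roughly like:

    /* ccmp-ior.c -- illustration only.  */
    int
    g (int a, int b)
    {
      return a == 0 || b > 3;
    }

    /* Plausible output (schematic):
         cmp   w0, 0           first compare
         ccmp  w1, 3, 0, ne    condition inverted (eq -> ne): compare b only
                               when a != 0; otherwise nzcv = 0 makes GT true
         cset  w0, gt                                                       */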

gcc/config/aarch64/aarch64.md

@ -271,18 +271,17 @@
""
"")
(define_insn "ccmp_and<mode>"
[(set (match_operand 1 "ccmp_cc_register" "")
(compare
(and:SI
(define_insn "ccmp<mode>"
[(set (match_operand:CC 1 "cc_register" "")
(if_then_else:CC
(match_operator 4 "aarch64_comparison_operator"
[(match_operand 0 "ccmp_cc_register" "")
[(match_operand 0 "cc_register" "")
(const_int 0)])
(match_operator 5 "aarch64_comparison_operator"
[(match_operand:GPI 2 "register_operand" "r,r,r")
(match_operand:GPI 3 "aarch64_ccmp_operand" "r,Uss,Usn")]))
(const_int 0)))]
"aarch64_ccmp_mode_to_code (GET_MODE (operands[1])) == GET_CODE (operands[5])"
(compare:CC
(match_operand:GPI 2 "register_operand" "r,r,r")
(match_operand:GPI 3 "aarch64_ccmp_operand" "r,Uss,Usn"))
(match_operand 5 "immediate_operand")))]
""
"@
ccmp\\t%<w>2, %<w>3, %k5, %m4
ccmp\\t%<w>2, %<w>3, %k5, %m4
@ -290,39 +289,6 @@
[(set_attr "type" "alus_sreg,alus_imm,alus_imm")]
)
(define_insn "ccmp_ior<mode>"
[(set (match_operand 1 "ccmp_cc_register" "")
(compare
(ior:SI
(match_operator 4 "aarch64_comparison_operator"
[(match_operand 0 "ccmp_cc_register" "")
(const_int 0)])
(match_operator 5 "aarch64_comparison_operator"
[(match_operand:GPI 2 "register_operand" "r,r,r")
(match_operand:GPI 3 "aarch64_ccmp_operand" "r,Uss,Usn")]))
(const_int 0)))]
"aarch64_ccmp_mode_to_code (GET_MODE (operands[1])) == GET_CODE (operands[5])"
"@
ccmp\\t%<w>2, %<w>3, %K5, %M4
ccmp\\t%<w>2, %<w>3, %K5, %M4
ccmn\\t%<w>2, #%n3, %K5, %M4"
[(set_attr "type" "alus_sreg,alus_imm,alus_imm")]
)
(define_expand "cmp<mode>"
[(set (match_operand 0 "cc_register" "")
(match_operator:CC 1 "aarch64_comparison_operator"
[(match_operand:GPI 2 "register_operand" "")
(match_operand:GPI 3 "aarch64_plus_operand" "")]))]
""
{
operands[1] = gen_rtx_fmt_ee (COMPARE,
SELECT_CC_MODE (GET_CODE (operands[1]),
operands[2], operands[3]),
operands[2], operands[3]);
}
)
;; Expansion of signed mod by a power of 2 using CSNEG.
;; For x0 % n where n is a power of 2 produce:
;; negs x1, x0
@ -2874,7 +2840,7 @@
;; Comparison insns
;; -------------------------------------------------------------------
(define_insn "*cmp<mode>"
(define_insn "cmp<mode>"
[(set (reg:CC CC_REGNUM)
(compare:CC (match_operand:GPI 0 "register_operand" "r,r,r")
(match_operand:GPI 1 "aarch64_plus_operand" "r,I,J")))]
@ -2961,7 +2927,7 @@
(define_expand "cstorecc4"
[(set (match_operand:SI 0 "register_operand")
(match_operator 1 "aarch64_comparison_operator"
[(match_operand 2 "ccmp_cc_register")
[(match_operand 2 "cc_register")
(match_operand 3 "const0_operand")]))]
""
"{
@ -3164,19 +3130,15 @@
(match_operand:ALLI 3 "register_operand" "")))]
""
{
rtx ccreg;
enum rtx_code code = GET_CODE (operands[1]);
if (code == UNEQ || code == LTGT)
FAIL;
if (!ccmp_cc_register (XEXP (operands[1], 0),
GET_MODE (XEXP (operands[1], 0))))
{
rtx ccreg;
ccreg = aarch64_gen_compare_reg (code, XEXP (operands[1], 0),
XEXP (operands[1], 1));
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}
ccreg = aarch64_gen_compare_reg (code, XEXP (operands[1], 0),
XEXP (operands[1], 1));
operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
}
)

gcc/config/aarch64/predicates.md

@ -43,23 +43,6 @@
(ior (match_operand 0 "register_operand")
(match_operand 0 "aarch64_ccmp_immediate")))
(define_special_predicate "ccmp_cc_register"
(and (match_code "reg")
(and (match_test "REGNO (op) == CC_REGNUM")
(ior (match_test "mode == GET_MODE (op)")
(match_test "mode == VOIDmode
&& (GET_MODE (op) == CC_DNEmode
|| GET_MODE (op) == CC_DEQmode
|| GET_MODE (op) == CC_DLEmode
|| GET_MODE (op) == CC_DLTmode
|| GET_MODE (op) == CC_DGEmode
|| GET_MODE (op) == CC_DGTmode
|| GET_MODE (op) == CC_DLEUmode
|| GET_MODE (op) == CC_DLTUmode
|| GET_MODE (op) == CC_DGEUmode
|| GET_MODE (op) == CC_DGTUmode)"))))
)
(define_predicate "aarch64_simd_register"
(and (match_code "reg")
(ior (match_test "REGNO_REG_CLASS (REGNO (op)) == FP_LO_REGS")

gcc/doc/tm.texi

@ -11370,27 +11370,27 @@ modes and they have different conditional execution capability, such as ARM.
@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_FIRST (rtx *@var{prep_seq}, rtx *@var{gen_seq}, int @var{code}, tree @var{op0}, tree @var{op1})
This function prepares to emit a comparison insn for the first compare in a
sequence of conditional comparisions. It returns a appropriate @code{CC}
for passing to @code{gen_ccmp_next} or @code{cbranch_optab}. The insns to
prepare the compare are saved in @var{prep_seq} and the compare insns are
saved in @var{gen_seq}. They will be emitted when all the compares in the
the conditional comparision are generated without error. @var{code} is
the @code{rtx_code} of the compare for @var{op0} and @var{op1}.
sequence of conditional comparisions. It returns an appropriate comparison
with @code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.
The insns to prepare the compare are saved in @var{prep_seq} and the compare
insns are saved in @var{gen_seq}. They will be emitted when all the
compares in the the conditional comparision are generated without error.
@var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.
@end deftypefn
@deftypefn {Target Hook} rtx TARGET_GEN_CCMP_NEXT (rtx *@var{prep_seq}, rtx *@var{gen_seq}, rtx @var{prev}, int @var{cmp_code}, tree @var{op0}, tree @var{op1}, int @var{bit_code})
This function prepare to emit a conditional comparison within a sequence of
conditional comparisons. It returns a appropriate @code{CC} for passing to
@code{gen_ccmp_next} or @code{cbranch_optab}. The insns to prepare the
compare are saved in @var{prep_seq} and the compare insns are saved in
@var{gen_seq}. They will be emitted when all the compares in the conditional
comparision are generated without error. The @var{prev} expression is the
result of a prior call to @code{gen_ccmp_first} or @code{gen_ccmp_next}. It
may return @code{NULL} if the combination of @var{prev} and this comparison is
not supported, otherwise the result must be appropriate for passing to
@code{gen_ccmp_next} or @code{cbranch_optab}. @var{code} is the
@code{rtx_code} of the compare for @var{op0} and @var{op1}. @var{bit_code}
is @code{AND} or @code{IOR}, which is the op on the two compares.
This function prepares to emit a conditional comparison within a sequence
of conditional comparisons. It returns an appropriate comparison with
@code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.
The insns to prepare the compare are saved in @var{prep_seq} and the compare
insns are saved in @var{gen_seq}. They will be emitted when all the
compares in the conditional comparision are generated without error. The
@var{prev} expression is the result of a prior call to @code{gen_ccmp_first}
or @code{gen_ccmp_next}. It may return @code{NULL} if the combination of
@var{prev} and this comparison is not supported, otherwise the result must
be appropriate for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.
@var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.
@var{bit_code} is @code{AND} or @code{IOR}, which is the op on the compares.
@end deftypefn
@deftypefn {Target Hook} unsigned TARGET_LOOP_UNROLL_ADJUST (unsigned @var{nunroll}, struct loop *@var{loop})

gcc/target.def

@ -2611,29 +2611,29 @@ modes and they have different conditional execution capability, such as ARM.",
DEFHOOK
(gen_ccmp_first,
"This function prepares to emit a comparison insn for the first compare in a\n\
sequence of conditional comparisions. It returns a appropriate @code{CC}\n\
for passing to @code{gen_ccmp_next} or @code{cbranch_optab}. The insns to\n\
prepare the compare are saved in @var{prep_seq} and the compare insns are\n\
saved in @var{gen_seq}. They will be emitted when all the compares in the\n\
the conditional comparision are generated without error. @var{code} is\n\
the @code{rtx_code} of the compare for @var{op0} and @var{op1}.",
sequence of conditional comparisions. It returns an appropriate comparison\n\
with @code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.\n\
The insns to prepare the compare are saved in @var{prep_seq} and the compare\n\
insns are saved in @var{gen_seq}. They will be emitted when all the\n\
compares in the the conditional comparision are generated without error.\n\
@var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.",
rtx, (rtx *prep_seq, rtx *gen_seq, int code, tree op0, tree op1),
NULL)
DEFHOOK
(gen_ccmp_next,
"This function prepare to emit a conditional comparison within a sequence of\n\
conditional comparisons. It returns a appropriate @code{CC} for passing to\n\
@code{gen_ccmp_next} or @code{cbranch_optab}. The insns to prepare the\n\
compare are saved in @var{prep_seq} and the compare insns are saved in\n\
@var{gen_seq}. They will be emitted when all the compares in the conditional\n\
comparision are generated without error. The @var{prev} expression is the\n\
result of a prior call to @code{gen_ccmp_first} or @code{gen_ccmp_next}. It\n\
may return @code{NULL} if the combination of @var{prev} and this comparison is\n\
not supported, otherwise the result must be appropriate for passing to\n\
@code{gen_ccmp_next} or @code{cbranch_optab}. @var{code} is the\n\
@code{rtx_code} of the compare for @var{op0} and @var{op1}. @var{bit_code}\n\
is @code{AND} or @code{IOR}, which is the op on the two compares.",
"This function prepares to emit a conditional comparison within a sequence\n\
of conditional comparisons. It returns an appropriate comparison with\n\
@code{CC} for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.\n\
The insns to prepare the compare are saved in @var{prep_seq} and the compare\n\
insns are saved in @var{gen_seq}. They will be emitted when all the\n\
compares in the conditional comparision are generated without error. The\n\
@var{prev} expression is the result of a prior call to @code{gen_ccmp_first}\n\
or @code{gen_ccmp_next}. It may return @code{NULL} if the combination of\n\
@var{prev} and this comparison is not supported, otherwise the result must\n\
be appropriate for passing to @code{gen_ccmp_next} or @code{cbranch_optab}.\n\
@var{code} is the @code{rtx_code} of the compare for @var{op0} and @var{op1}.\n\
@var{bit_code} is @code{AND} or @code{IOR}, which is the op on the compares.",
rtx, (rtx *prep_seq, rtx *gen_seq, rtx prev, int cmp_code, tree op0, tree op1, int bit_code),
NULL)
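
A rough sketch of the calling convention both hook descriptions assume (this mirrors how ccmp.c drives the hooks; the trees t0..t3 and the surrounding declarations are placeholders, not code from this commit):

    /* t0/t1 and t2/t3 are the tree operands of two compares that are
       ANDed together; EQ, GT and AND are rtx codes.  */
    rtx prep_seq = NULL_RTX, gen_seq = NULL_RTX;

    rtx prev = targetm.gen_ccmp_first (&prep_seq, &gen_seq, EQ, t0, t1);
    if (prev != NULL_RTX)
      prev = targetm.gen_ccmp_next (&prep_seq, &gen_seq, prev, GT, t2, t3, AND);

    if (prev != NULL_RTX)
      {
        /* Only once the whole chain has expanded are the insns emitted;
           prev is a comparison with the CC register, ready for cstorecc4
           or a conditional branch.  */
        emit_insn (prep_seq);
        emit_insn (gen_seq);
      }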