re PR target/51244 ([SH] Inefficient conditional branch and code around T bit)

gcc/
	PR target/51244
	* config/sh/sh_treg_combine.cc (is_conditional_insn): New function.
	(cbranch_trace): Add member rtx* condition_rtx_in_insn, initialize it
	accordingly in constructor.
	(cbranch_trace::branch_condition_rtx_ref): New function.
	(cbranch_trace::branch_condition_rtx): Use branch_condition_rtx_ref.
	(sh_treg_combine::try_invert_branch_condition): Invert condition rtx
	in insn using reversed_comparison_code and validate_change instead of
	invert_jump_1.
	(sh_treg_combine::execute): Look for conditional insns in basic blocks
	in addition to conditional branches.
	* config/sh/sh.md (*movsicc_div0s): Remove combine patterns.

From-SVN: r218850
This commit is contained in:
Oleg Endo 2014-12-17 23:08:14 +00:00
parent ff49a9ba7b
commit 89f8797ee1
3 changed files with 69 additions and 49 deletions

View File

@@ -1,3 +1,18 @@
2014-12-17 Oleg Endo <olegendo@gcc.gnu.org>
PR target/51244
* config/sh/sh_treg_combine.cc (is_conditional_insn): New function.
(cbranch_trace): Add member rtx* condition_rtx_in_insn, initialize it
accordingly in constructor.
(cbranch_trace::branch_condition_rtx_ref): New function.
(cbranch_trace::branch_condition_rtx): Use branch_condition_rtx_ref.
(sh_treg_combine::try_invert_branch_condition): Invert condition rtx
in insn using reversed_comparison_code and validate_change instead of
invert_jump_1.
(sh_treg_combine::execute): Look for conditional insns in basic blocks
in addition to conditional branches.
* config/sh/sh.md (*movsicc_div0s): Remove combine patterns.
2014-12-17 Oleg Endo <olegendo@gcc.gnu.org>
PR target/51244

View File

@@ -1086,47 +1086,6 @@
(label_ref (match_dup 2))
(pc)))])
;; Conditional move combine pattern for div0s comparisons.
;; This is used when TARGET_PRETEND_CMOVE is in effect.
;; Matches (if_then_else (ge (xor op1 op2) 0) op3 op4), i.e. the
;; "operands have equal signs" test that the div0s insn computes.
;; The split emits a T-bit store of the inverted comparison
;; (lt (xor op1 op2) 0) and then a T-bit conditional move with the
;; arms swapped, so overall behavior is unchanged.
;; NOTE(review): this pattern appears in the removed side of the diff;
;; the commit deletes it in favor of the sh_treg_combine pass handling.
(define_insn_and_split "*movsicc_div0s"
[(set (match_operand:SI 0 "arith_reg_dest" "")
(if_then_else:SI (ge (xor:SI (match_operand:SI 1 "arith_reg_operand" "")
(match_operand:SI 2 "arith_reg_operand" ""))
(const_int 0))
(match_operand:SI 3 "arith_reg_operand" "")
(match_operand:SI 4 "general_movsrc_operand" "")))
(clobber (reg:SI T_REG))]
"TARGET_PRETEND_CMOVE"
"#"
"&& 1"
[(set (reg:SI T_REG) (lt:SI (xor:SI (match_dup 1) (match_dup 2))
(const_int 0)))
(set (match_dup 0)
(if_then_else (ne (reg:SI T_REG) (const_int 0))
(match_dup 4)
(match_dup 3)))])
;; Same as the ge/xor div0s cmove pattern above, but matches the
;; equivalent sign-bit comparison written as
;; (eq (lshiftrt op1 31) (lshiftrt op2 31)).
;; The split is identical: set T from (lt (xor op1 op2) 0) and emit a
;; T-bit conditional move with swapped arms.
;; NOTE(review): also on the removed side of the diff.
(define_insn_and_split "*movsicc_div0s"
[(set (match_operand:SI 0 "arith_reg_dest")
(if_then_else:SI (eq (lshiftrt:SI
(match_operand:SI 1 "arith_reg_operand")
(const_int 31))
(lshiftrt:SI
(match_operand:SI 2 "arith_reg_operand")
(const_int 31)))
(match_operand:SI 3 "arith_reg_operand")
(match_operand:SI 4 "general_movsrc_operand")))
(clobber (reg:SI T_REG))]
"TARGET_PRETEND_CMOVE"
"#"
"&& 1"
[(set (reg:SI T_REG) (lt:SI (xor:SI (match_dup 1) (match_dup 2))
(const_int 0)))
(set (match_dup 0)
(if_then_else (ne (reg:SI T_REG) (const_int 0))
(match_dup 4)
(match_dup 3)))])
;; -------------------------------------------------------------------------
;; SImode unsigned integer comparisons
;; -------------------------------------------------------------------------

View File

@@ -432,6 +432,16 @@ trace_reg_uses (rtx reg, rtx_insn *start_insn, rtx abort_at_insn)
return count;
}
// Return true if insn I is a conditional insn, i.e. a non-debug insn
// whose pattern has the shape (set (dst) (if_then_else ...)).
static bool
is_conditional_insn (rtx_insn* i)
{
  if (!INSN_P (i) || !NONDEBUG_INSN_P (i))
    return false;

  rtx pat = PATTERN (i);
  return GET_CODE (pat) == SET && GET_CODE (XEXP (pat, 1)) == IF_THEN_ELSE;
}
// FIXME: Remove dependency on SH predicate function somehow.
extern int t_reg_operand (rtx, machine_mode);
extern int negt_reg_operand (rtx, machine_mode);
@@ -484,6 +494,7 @@ private:
struct cbranch_trace
{
rtx_insn *cbranch_insn;
rtx* condition_rtx_in_insn;
branch_condition_type_t cbranch_type;
// The comparison against zero right before the conditional branch.
@@ -495,9 +506,14 @@ private:
// Construct a trace for INSN and locate the reference to the condition
// rtx inside the insn pattern, so the condition can later be modified
// in place (e.g. when inverting the branch condition).
cbranch_trace (rtx_insn *insn)
: cbranch_insn (insn),
condition_rtx_in_insn (NULL),
cbranch_type (unknown_branch_condition),
setcc ()
{
// For a conditional insn (set (dst) (if_then_else cond a b)) the
// condition is operand 0 of the if_then_else.
if (is_conditional_insn (cbranch_insn))
condition_rtx_in_insn = &XEXP (XEXP (PATTERN (cbranch_insn), 1), 0);
// For a conditional branch, pc_set returns its (set (pc) ...) rtx;
// the condition is operand 0 of that set's if_then_else source.
// If neither case matches, condition_rtx_in_insn stays NULL.
else if (rtx x = pc_set (cbranch_insn))
condition_rtx_in_insn = &XEXP (XEXP (x, 1), 0);
}
basic_block bb (void) const { return BLOCK_FOR_INSN (cbranch_insn); }
@@ -505,8 +521,16 @@ private:
rtx
branch_condition_rtx (void) const
{
rtx x = pc_set (cbranch_insn);
return x == NULL_RTX ? NULL_RTX : XEXP (XEXP (x, 1), 0);
return condition_rtx_in_insn != NULL ? *condition_rtx_in_insn : NULL;
}
// Return a modifiable reference to the condition rtx embedded in the
// traced insn's pattern, allowing callers to replace the comparison
// in place (see try_invert_branch_condition).
rtx&
branch_condition_rtx_ref (void) const
{
// Before anything gets to invoke this function, there are other checks
// in place to make sure that we have a known branch condition and thus
// the ref to the rtx in the insn.
gcc_assert (condition_rtx_in_insn != NULL);
return *condition_rtx_in_insn;
}
bool
@@ -1033,8 +1057,18 @@ sh_treg_combine::try_invert_branch_condition (cbranch_trace& trace)
{
log_msg ("inverting branch condition\n");
if (!invert_jump_1 (trace.cbranch_insn, JUMP_LABEL (trace.cbranch_insn)))
log_return (false, "invert_jump_1 failed\n");
rtx& comp = trace.branch_condition_rtx_ref ();
rtx_code rev_cmp_code = reversed_comparison_code (comp, trace.cbranch_insn);
if (rev_cmp_code == UNKNOWN)
log_return (false, "reversed_comparison_code = UNKNOWN\n");
validate_change (trace.cbranch_insn, &comp,
gen_rtx_fmt_ee (rev_cmp_code,
GET_MODE (comp), XEXP (comp, 0),
XEXP (comp, 1)),
1);
if (verify_changes (num_validated_changes ()))
confirm_change_group ();
@@ -1531,14 +1565,26 @@ sh_treg_combine::execute (function *fun)
log_rtx (m_ccreg);
log_msg (" STORE_FLAG_VALUE = %d\n", STORE_FLAG_VALUE);
// Look for basic blocks that end with a conditional branch and try to
// optimize them.
// Look for basic blocks that end with a conditional branch or for
// conditional insns and try to optimize them.
basic_block bb;
FOR_EACH_BB_FN (bb, fun)
{
rtx_insn *i = BB_END (bb);
rtx_insn* i = BB_END (bb);
if (i == NULL || i == PREV_INSN (BB_HEAD (bb)))
continue;
// A conditional branch is always the last insn of a basic block.
if (any_condjump_p (i) && onlyjump_p (i))
try_optimize_cbranch (i);
{
try_optimize_cbranch (i);
i = PREV_INSN (i);
}
// Check all insns in block for conditional insns.
for (; i != NULL && i != PREV_INSN (BB_HEAD (bb)); i = PREV_INSN (i))
if (is_conditional_insn (i))
try_optimize_cbranch (i);
}
log_msg ("\n\n");