* config/ia64/ia64-protos.h (ia64_expand_prediction): Declare.

* config/ia64/ia64-protos.h (ia64_expand_prediction): Declare.
        * config/ia64/ia64.c (ia64_print_operand_address): Handle 'j'.
        (ia64_expand_prediction): New.
        (emit_insn_group_barriers): Don't look at notes.  Emit barriers
        as needed after calls.
        (ia64_epilogue_uses): Mark b0 used.
        * config/ia64/ia64.md (beq_true, beq_false): Remove.
        (bne_true, bne_false): Remove.
        (eq_return, eq_not_return, ne_return, ne_not_return): Remove.
        (br_true, br_false): New.  Use predicate_operator and
        ia64_expand_prediction.
        (return_true, return_false): Likewise.
        (call_internal): Use ia64_expand_prediction.  Don't emit stop bit.
        (call_internal1, call_value_internal): Likewise.
        (call_value_internal1, call_multiple_values_internal1): Likewise.

From-SVN: r33793
This commit is contained in:
Richard Henderson 2000-05-08 23:17:35 -07:00 committed by Richard Henderson
parent c66f079e8d
commit 6b6c1201e6
4 changed files with 207 additions and 103 deletions

View File

@@ -1,3 +1,40 @@
2000-05-08 Richard Henderson <rth@cygnus.com>
* final.c (current_insn_predicate): New.
(final_scan_insn): Set it.
* output.h (current_insn_predicate): Declare.
* ifcvt.c (cond_exec_process_insns): New argument prob_val.
Attach it to call insns.
(cond_exec_process_if_block): Track probability for true and
false branches.
(dead_or_predicable): Likewise.
* predict.c (PROB_NEVER, PROB_VERY_UNLIKELY): New.
(PROB_UNLIKELY, PROB_EVEN, PROB_LIKELY): New.
(PROB_VERY_LIKELY, PROB_ALWAYS): New.
(estimate_probability, expected_value_to_br_prob): Use them.
* config/ia64/ia64-protos.h (ia64_expand_prediction): Declare.
* config/ia64/ia64.c (ia64_print_operand_address): Handle 'j'.
(ia64_expand_prediction): New.
(emit_insn_group_barriers): Don't look at notes. Emit barriers
as needed after calls.
(ia64_epilogue_uses): Mark b0 used.
* config/ia64/ia64.md (beq_true, beq_false): Remove.
(bne_true, bne_false): Remove.
(eq_return, eq_not_return, ne_return, ne_not_return): Remove.
(br_true, br_false): New. Use predicate_operator and
ia64_expand_prediction.
(return_true, return_false): Likewise.
(call_internal): Use ia64_expand_prediction. Don't emit stop bit.
(call_internal1, call_value_internal): Likewise.
(call_value_internal1, call_multiple_values_internal1): Likewise.
* config/ia64/ia64.c (ia64_direct_return): Require frame size zero.
* config/ia64/ia64.md (extendsfdf2): Split only after reload.
2000-05-08 Richard Henderson <rth@cygnus.com>
* rtlanal.c (may_trap_p): Consider old-style and volatile

View File

@@ -61,6 +61,7 @@ extern enum reg_class ia64_secondary_reload_class PARAMS((enum reg_class,
enum machine_mode,
rtx));
extern void ia64_reorg PARAMS((rtx));
extern const char *ia64_expand_prediction PARAMS((rtx, const char *));
#endif /* RTX_CODE */
#ifdef TREE_CODE

View File

@@ -1695,6 +1695,7 @@ ia64_print_operand_address (stream, address)
a floating point register emitted normally.
I Invert a predicate register by adding 1.
J Select the proper predicate register for a condition.
j Select the inverse predicate register for a condition.
O Append .acq for volatile load.
P Postincrement of a MEM.
Q Append .rel for volatile store.
@@ -1755,7 +1756,15 @@ ia64_print_operand (file, x, code)
return;
case 'J':
fputs (reg_names [REGNO (XEXP (x, 0)) + (GET_CODE (x) == EQ)], file);
case 'j':
{
unsigned int regno = REGNO (XEXP (x, 0));
if (GET_CODE (x) == EQ)
regno += 1;
if (code == 'j')
regno ^= 1;
fputs (reg_names [regno], file);
}
return;
case 'O':
@@ -1864,6 +1873,47 @@ ia64_print_operand (file, x, code)
return;
}
/* For conditional branches, returns or calls, substitute
   sptk, dptk, dpnt, or spnt for %s.  */

const char *
ia64_expand_prediction (insn, template)
     rtx insn;
     const char *template;
{
  /* Branch-hint completers, indexed from least likely taken (static
     predict not-taken) to most likely taken (static predict taken).  */
  static char const pred_name[4][5] = {
    "spnt", "dpnt", "dptk", "sptk"
  };
  /* Result buffer; note this makes the function non-reentrant, and the
     returned string is only valid until the next call.  */
  static char new_template[64];
  rtx prob_note;
  int which;

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  if (! prob_note)
    /* No recorded probability: assume dynamically predicted taken.  */
    which = 2;
  else
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      /* Guess top and bottom 10% statically predicted.  */
      if (prob < REG_BR_PROB_BASE / 10)
	which = 0;
      else if (prob < REG_BR_PROB_BASE / 2)
	which = 1;
      else if (prob < REG_BR_PROB_BASE * 9 / 10)
	which = 2;
      else
	which = 3;
    }

  /* The "%s" in TEMPLATE (2 chars) expands to a 4-char completer, so we
     need two bytes of slack plus the terminating NUL.  */
  if (strlen (template) >= sizeof (new_template) - 3)
    abort ();

  sprintf (new_template, template, pred_name[which]);
  return new_template;
}
/* This function returns the register class required for a secondary
@@ -2654,9 +2704,6 @@ static void
emit_insn_group_barriers (insns)
rtx insns;
{
int need_barrier = 0;
int exception_nesting;
struct reg_flags flags;
rtx insn, prev_insn;
memset (rws_sum, 0, sizeof (rws_sum));
@@ -2664,31 +2711,61 @@ emit_insn_group_barriers (insns)
prev_insn = 0;
for (insn = insns; insn; insn = NEXT_INSN (insn))
{
int need_barrier = 0;
struct reg_flags flags;
memset (&flags, 0, sizeof (flags));
switch (GET_CODE (insn))
{
case NOTE:
switch (NOTE_LINE_NUMBER (insn))
{
case NOTE_INSN_EH_REGION_BEG:
exception_nesting++;
break;
case NOTE_INSN_EH_REGION_END:
exception_nesting--;
break;
case NOTE_INSN_EPILOGUE_BEG:
break;
default:
break;
}
break;
case JUMP_INSN:
case CALL_INSN:
flags.is_branch = 1;
memset (rws_insn, 0, sizeof (rws_insn));
need_barrier = rtx_needs_barrier (PATTERN (insn), flags, 0);
if (need_barrier)
{
/* PREV_INSN null can happen if the very first insn is a
volatile asm. */
if (prev_insn)
emit_insn_after (gen_insn_group_barrier (), prev_insn);
memcpy (rws_sum, rws_insn, sizeof (rws_sum));
}
/* A call must end a group, otherwise the assembler might pack
it in with a following branch and then the function return
goes to the wrong place. Do this unconditionally for
unconditional calls, simply because it (1) looks nicer and
(2) keeps the data structures more accurate for the insns
following the call. */
need_barrier = 1;
if (GET_CODE (PATTERN (insn)) == COND_EXEC)
{
rtx next_insn = insn;
do
next_insn = next_nonnote_insn (next_insn);
while (next_insn
&& GET_CODE (next_insn) == INSN
&& (GET_CODE (PATTERN (next_insn)) == USE
|| GET_CODE (PATTERN (next_insn)) == CLOBBER));
if (next_insn && GET_CODE (next_insn) != JUMP_INSN)
need_barrier = 0;
}
if (need_barrier)
{
emit_insn_after (gen_insn_group_barrier (), insn);
memset (rws_sum, 0, sizeof (rws_sum));
prev_insn = NULL_RTX;
}
break;
case JUMP_INSN:
flags.is_branch = 1;
/* FALLTHRU */
case INSN:
if (GET_CODE (PATTERN (insn)) == USE)
/* Don't care about USE "insns"---those are used to
@@ -2698,7 +2775,7 @@ emit_insn_group_barriers (insns)
else
{
memset (rws_insn, 0, sizeof (rws_insn));
need_barrier = rtx_needs_barrier (PATTERN (insn), flags, 0);
need_barrier |= rtx_needs_barrier (PATTERN (insn), flags, 0);
/* Check to see if the previous instruction was a volatile
asm. */
@@ -2713,7 +2790,6 @@ emit_insn_group_barriers (insns)
emit_insn_after (gen_insn_group_barrier (), prev_insn);
memcpy (rws_sum, rws_insn, sizeof (rws_sum));
}
need_barrier = 0;
prev_insn = insn;
}
break;
@@ -2753,7 +2829,9 @@ ia64_epilogue_uses (regno)
from such a call, we need to make sure the function restores the
original gp-value, even if the function itself does not use the
gp anymore. */
if (regno == R_GR(1) && TARGET_CONST_GP && !(TARGET_AUTO_PIC || TARGET_NO_PIC))
if (regno == R_GR (1)
&& TARGET_CONST_GP
&& !(TARGET_AUTO_PIC || TARGET_NO_PIC))
return 1;
/* For functions defined with the syscall_linkage attribute, all input
@@ -2768,6 +2846,11 @@ ia64_epilogue_uses (regno)
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
return 1;
/* Conditional return patterns can't represent the use of `b0' as
the return address, so we force the value live this way. */
if (regno == R_BR (0))
return 1;
return 0;
}

View File

@@ -2134,53 +2134,29 @@
operands[3] = ia64_compare_op1;
}")
;; ??? Need a way to choose between dpnt and dptk. Currently, I assume that
;; equality tests will likely fail, and inequality tests will likely succeed.
(define_insn "*beq_true"
(define_insn "*br_true"
[(set (pc)
(if_then_else (eq:CC (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(label_ref (match_operand 1 "" ""))
(if_then_else (match_operator 0 "predicate_operator"
[(match_operand:CC 1 "register_operand" "c")
(const_int 0)])
(label_ref (match_operand 2 "" ""))
(pc)))]
""
"(%I0) br.cond.dpnt %l1"
"* return ia64_expand_prediction (insn, \"(%%J0) br.cond.%s %%l2\");"
[(set_attr "type" "B")
(set_attr "predicable" "no")])
(define_insn "*beq_false"
(define_insn "*br_false"
[(set (pc)
(if_then_else (eq:CC (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(if_then_else (match_operator 0 "predicate_operator"
[(match_operand:CC 1 "register_operand" "c")
(const_int 0)])
(pc)
(label_ref (match_operand 1 "" ""))))]
(label_ref (match_operand 2 "" ""))))]
""
"(%0) br.cond.dptk %l1"
"* return ia64_expand_prediction (insn, \"(%%j0) br.cond.%s %%l2\");"
[(set_attr "type" "B")
(set_attr "predicable" "no")])
(define_insn "*bne_true"
[(set (pc)
(if_then_else (ne:CC (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(label_ref (match_operand 1 "" ""))
(pc)))]
""
"(%0) br.cond.dptk %l1"
[(set_attr "type" "B")
(set_attr "predicable" "no")])
(define_insn "*bne_false"
[(set (pc)
(if_then_else (ne:CC (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(pc)
(label_ref (match_operand 1 "" ""))))]
""
"(%I0) br.cond.dpnt %l1"
[(set_attr "type" "B")
(set_attr "predicable" "no")])
;; ::::::::::::::::::::
;; ::
@@ -2716,16 +2692,19 @@
operands[2] = gen_reg_rtx (DImode);
}")
;; ??? A call must end a group, otherwise, the assembler might pack it in
;; a group with a following branch, and then the function return goes to the
;; wrong place. We could perhaps handle this in emit_insn_group_barriers.
(define_insn "call_internal"
[(call (mem:DI (match_operand:DI 0 "call_operand" "bi"))
(match_operand 1 "" ""))
(clobber (match_operand:DI 2 "register_operand" "=b"))]
""
"br.call.sptk.many %2 = %0 ;;"
"*
{
operands[3] = current_insn_predicate;
if (operands[3] != NULL_RTX)
return ia64_expand_prediction (insn, \"(%%J3) br.call.%s.many %2 = %0\");
else
return \"br.call.sptk.many %2 = %0\";
}"
[(set_attr "type" "B")])
(define_insn "*call_internal1"
@@ -2734,7 +2713,14 @@
(use (reg:DI 1))
(clobber (match_operand:DI 2 "register_operand" "=b"))]
""
"br.call.sptk.many %2 = %0 ;;"
"*
{
operands[3] = current_insn_predicate;
if (operands[3] != NULL_RTX)
return ia64_expand_prediction (insn, \"(%%J3) br.call.%s.many %2 = %0\");
else
return \"br.call.sptk.many %2 = %0\";
}"
[(set_attr "type" "B")])
;; Subroutine call instruction returning a value. Operand 0 is the hard
@@ -2928,17 +2914,20 @@
= gen_rtx_SET (VOIDmode, XEXP (XVECEXP (operands[0], 0, i), 0), call);
}")
;; ??? A call must end a group, otherwise, the assembler might pack it in
;; a group with a following branch, and then the function return goes to the
;; wrong place. We could perhaps handle this in emit_insn_group_barriers.
(define_insn "call_value_internal"
[(set (match_operand 0 "register_operand" "=rf")
(call (mem:DI (match_operand:DI 1 "call_operand" "bi"))
(match_operand 2 "" "")))
(clobber (match_operand:DI 3 "register_operand" "=b"))]
""
"br.call.sptk.many %3 = %1 ;;"
"*
{
operands[4] = current_insn_predicate;
if (operands[4] != NULL_RTX)
return ia64_expand_prediction (insn, \"(%%J4) br.call.%s.many %3 = %1\");
else
return \"br.call.sptk.many %3 = %1\";
}"
[(set_attr "type" "B")])
(define_insn "*call_value_internal1"
@@ -2948,7 +2937,14 @@
(use (reg:DI 1))
(clobber (match_operand:DI 3 "register_operand" "=b"))]
""
"br.call.sptk.many %3 = %1 ;;"
"*
{
operands[4] = current_insn_predicate;
if (operands[4] != NULL_RTX)
return ia64_expand_prediction (insn, \"(%%J4) br.call.%s.many %3 = %1\");
else
return \"br.call.sptk.many %3 = %1\";
}"
[(set_attr "type" "B")])
(define_insn "*call_multiple_values_internal1"
@@ -2959,7 +2955,14 @@
(use (reg:DI 1))
(clobber (match_operand:DI 4 "register_operand" "=b"))])]
""
"br.call.sptk.many %4 = %2 ;;"
"*
{
operands[5] = current_insn_predicate;
if (operands[5] != NULL_RTX)
return ia64_expand_prediction (insn, \"(%%J5) br.call.%s.many %4 = %2\");
else
return \"br.call.sptk.many %4 = %2\";
}"
[(set_attr "type" "B")])
;; Call subroutine returning any type.
@@ -3004,47 +3007,27 @@
"br.ret.sptk.many rp"
[(set_attr "type" "B")])
(define_insn "*eq_return"
(define_insn "*return_true"
[(set (pc)
(if_then_else (eq:CC (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(if_then_else (match_operator 0 "predicate_operator"
[(match_operand:CC 1 "register_operand" "c")
(const_int 0)])
(return)
(pc)))]
"ia64_direct_return ()"
"(%I0) br.ret.sptk.many rp"
"* return ia64_expand_prediction (insn, \"(%%J0) br.ret.%s.many rp\");"
[(set_attr "type" "B")
(set_attr "predicable" "no")])
(define_insn "*eq_not_return"
(define_insn "*return_false"
[(set (pc)
(if_then_else (eq:CC (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(if_then_else (match_operator 0 "predicate_operator"
[(match_operand:CC 1 "register_operand" "c")
(const_int 0)])
(pc)
(return)))]
"ia64_direct_return ()"
"(%0) br.ret.sptk.many rp"
[(set_attr "type" "B")
(set_attr "predicable" "no")])
(define_insn "*ne_return"
[(set (pc)
(if_then_else (ne (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(return)
(pc)))]
"ia64_direct_return ()"
"(%0) br.ret.sptk.many rp"
[(set_attr "type" "B")
(set_attr "predicable" "no")])
(define_insn "*ne_not_return"
[(set (pc)
(if_then_else (ne (match_operand:CC 0 "register_operand" "c")
(const_int 0))
(pc)
(return)))]
"ia64_direct_return ()"
"(%I0) br.ret.sptk.many rp"
"* return ia64_expand_prediction (insn, \"(%%j0) br.ret.%s.many rp\");"
[(set_attr "type" "B")
(set_attr "predicable" "no")])