rtl.h (INSN_ANNULLED_BRANCH_P): Only allow JUMP_INSN.

        * rtl.h (INSN_ANNULLED_BRANCH_P): Only allow JUMP_INSN.
        * dwarf2cfi.c (scan_trace): Test JUMP_P before INSN_ANNULLED_BRANCH_P.
        * resource.c (next_insn_no_annul): Likewise.
        (mark_set_resources): Likewise.
        * reorg.c (delete_from_delay_slot): Likewise.
        (dbr_schedule, redundant_insn, try_merge_delay_insns): Likewise.
        (get_branch_condition): Test pc_rtx and LABEL_REF before dereferencing.

From-SVN: r177944
Richard Henderson 2011-08-21 11:49:49 -07:00 committed by Richard Henderson
parent b2f7ebc145
commit 8f06d483ec
5 changed files with 53 additions and 25 deletions
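For context: after this change INSN_ANNULLED_BRANCH_P flag-checks JUMP_INSN only, so any caller that can see a CALL_INSN or a plain INSN (for example element 0 of a delay-slot SEQUENCE) has to test JUMP_P first. Below is a minimal sketch of that caller-side pattern, not code from this commit; the helper name is made up, it assumes the usual GCC internal headers (config.h, system.h, coretypes.h, rtl.h), and only the macros are the real rtl.h accessors.

/* Sketch only: true if SEQ_INSN is a delay-slot SEQUENCE whose
   controlling insn is an annulling branch.  Element 0 may be a
   JUMP_INSN, CALL_INSN or plain INSN, so JUMP_P must be tested
   before the annul flag, which is now valid only on JUMP_INSNs.  */
static bool
sequence_annulled_branch_p (rtx seq_insn)
{
  rtx control = XVECEXP (PATTERN (seq_insn), 0, 0);
  return JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);
}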

gcc/ChangeLog

@@ -1,3 +1,13 @@
+2011-08-21 Richard Henderson <rth@redhat.com>
+* rtl.h (INSN_ANNULLED_BRANCH_P): Only allow JUMP_INSN.
+* dwarf2cfi.c (scan_trace): Test JUMP_P before INSN_ANNULLED_BRANCH_P.
+* resource.c (next_insn_no_annul): Likewise.
+(mark_set_resources): Likewise.
+* reorg.c (delete_from_delay_slot): Likewise.
+(dbr_schedule, redundant_insn, try_merge_delay_insns): Likewise.
+(get_branch_condition): Test pc_rtx and LABEL_REF before dereferencing.
2011-08-21 Uros Bizjak <ubizjak@gmail.com>
* config/i386/i386.md (any_div): Remove.

gcc/dwarf2cfi.c

@@ -2427,7 +2427,7 @@ scan_trace (dw_trace_info *trace)
notice_eh_throw (control);
dwarf2out_flush_queued_reg_saves ();
-if (INSN_ANNULLED_BRANCH_P (control))
+if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
{
/* ??? Hopefully multiple delay slots are not annulled. */
gcc_assert (n == 2);

gcc/reorg.c

@@ -667,7 +667,7 @@ delete_from_delay_slot (rtx insn)
annul flag. */
if (delay_list)
trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
-else if (INSN_P (trial))
+else if (JUMP_P (trial))
INSN_ANNULLED_BRANCH_P (trial) = 0;
INSN_FROM_TARGET_P (insn) = 0;
@@ -1060,13 +1060,15 @@ get_branch_condition (rtx insn, rtx target)
return const_true_rtx;
else if (GET_CODE (src) == IF_THEN_ELSE
-&& XEXP (XEXP (src, 1), 0) == target
-&& XEXP (src, 2) == pc_rtx)
+&& XEXP (src, 2) == pc_rtx
+&& GET_CODE (XEXP (src, 1)) == LABEL_REF
+&& XEXP (XEXP (src, 1), 0) == target)
return XEXP (src, 0);
else if (GET_CODE (src) == IF_THEN_ELSE
-&& XEXP (XEXP (src, 2), 0) == target
-&& XEXP (src, 1) == pc_rtx)
+&& XEXP (src, 1) == pc_rtx
+&& GET_CODE (XEXP (src, 2)) == LABEL_REF
+&& XEXP (XEXP (src, 2), 0) == target)
{
enum rtx_code rev;
rev = reversed_comparison_code (XEXP (src, 0), insn);
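For the get_branch_condition hunk just above, the point of the reordered tests is the shape of a conditional jump body: (set (pc) (if_then_else COND (label_ref LABEL) (pc))), possibly with the arms swapped, where an arm can also be an rtx with no operands, such as a (return) on targets with conditional returns. Dereferencing XEXP (XEXP (src, 1), 0) before confirming GET_CODE (XEXP (src, 1)) == LABEL_REF would then read a nonexistent operand. Here is a standalone restatement of the guarded test, assuming the usual GCC internal headers; the function name is illustrative only, not part of this commit.

/* Sketch: return the condition rtx when SRC, the SET_SRC of a jump,
   is an if_then_else whose taken arm is a label_ref to TARGET and
   whose fall-through arm is (pc); otherwise return NULL_RTX.  The
   LABEL_REF check guards the inner XEXP dereference.  */
static rtx
condition_if_branch_to (rtx src, rtx target)
{
  if (GET_CODE (src) == IF_THEN_ELSE
      && XEXP (src, 2) == pc_rtx
      && GET_CODE (XEXP (src, 1)) == LABEL_REF
      && XEXP (XEXP (src, 1), 0) == target)
    return XEXP (src, 0);
  return NULL_RTX;
}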
@@ -1433,7 +1435,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
{
rtx trial, next_trial;
rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
-int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
+int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
int slot_number = 1;
int num_slots = XVECLEN (PATTERN (insn), 0);
rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
@@ -1517,7 +1519,8 @@ try_merge_delay_insns (rtx insn, rtx thread)
if (slot_number != num_slots
&& trial && NONJUMP_INSN_P (trial)
&& GET_CODE (PATTERN (trial)) == SEQUENCE
-&& ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
+&& !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
+&& INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
{
rtx pat = PATTERN (trial);
rtx filled_insn = XVECEXP (pat, 0, 0);
@@ -1756,24 +1759,30 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
if (GET_CODE (pat) == SEQUENCE)
{
+bool annul_p = false;
+rtx control = XVECEXP (pat, 0, 0);
/* If this is a CALL_INSN and its delay slots, it is hard to track
the resource needs properly, so give up. */
-if (CALL_P (XVECEXP (pat, 0, 0)))
+if (CALL_P (control))
return 0;
/* If this is an INSN or JUMP_INSN with delayed effects, it
is hard to track the resource needs properly, so give up. */
#ifdef INSN_SETS_ARE_DELAYED
-if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
+if (INSN_SETS_ARE_DELAYED (control))
return 0;
#endif
#ifdef INSN_REFERENCES_ARE_DELAYED
-if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
+if (INSN_REFERENCES_ARE_DELAYED (control))
return 0;
#endif
+if (JUMP_P (control))
+annul_p = INSN_ANNULLED_BRANCH_P (control);
/* See if any of the insns in the delay slot match, updating
resource requirements as we go. */
for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
@@ -1783,8 +1792,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
/* If an insn will be annulled if the branch is false, it isn't
considered as a possible duplicate insn. */
if (rtx_equal_p (PATTERN (candidate), ipat)
-&& ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
-&& INSN_FROM_TARGET_P (candidate)))
+&& ! (annul_p && INSN_FROM_TARGET_P (candidate)))
{
/* Show that this insn will be used in the sequel. */
INSN_FROM_TARGET_P (candidate) = 0;
@@ -1793,15 +1801,14 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
/* Unless this is an annulled insn from the target of a branch,
we must stop if it sets anything needed or set by INSN. */
-if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
-|| ! INSN_FROM_TARGET_P (candidate))
+if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
&& insn_sets_resource_p (candidate, &needed, true))
return 0;
}
/* If the insn requiring the delay slot conflicts with INSN, we
must stop. */
-if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, true))
+if (insn_sets_resource_p (control, &needed, true))
return 0;
}
else
@@ -3867,7 +3874,8 @@ dbr_schedule (rtx first)
{
rtx target;
-INSN_ANNULLED_BRANCH_P (insn) = 0;
+if (JUMP_P (insn))
+INSN_ANNULLED_BRANCH_P (insn) = 0;
INSN_FROM_TARGET_P (insn) = 0;
/* Skip vector tables. We can't get attributes for them. */
@@ -3977,10 +3985,12 @@ dbr_schedule (rtx first)
{
if (GET_CODE (PATTERN (insn)) == SEQUENCE)
{
+rtx control;
j = XVECLEN (PATTERN (insn), 0) - 1;
if (j > MAX_DELAY_HISTOGRAM)
j = MAX_DELAY_HISTOGRAM;
-if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
+control = XVECEXP (PATTERN (insn), 0, 0);
+if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
total_annul_slots[j]++;
else
total_delay_slots[j]++;

gcc/resource.c

@@ -171,7 +171,7 @@ next_insn_no_annul (rtx insn)
{
/* If INSN is an annulled branch, skip any insns from the target
of the branch. */
-if (INSN_P (insn)
+if (JUMP_P (insn)
&& INSN_ANNULLED_BRANCH_P (insn)
&& NEXT_INSN (PREV_INSN (insn)) != insn)
{
@@ -710,10 +710,18 @@ mark_set_resources (rtx x, struct resources *res, int in_dest,
return;
case SEQUENCE:
-for (i = 0; i < XVECLEN (x, 0); i++)
-if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
-&& INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
-mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
+{
+rtx control = XVECEXP (x, 0, 0);
+bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);
+mark_set_resources (control, res, 0, mark_type);
+for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
+{
+rtx elt = XVECEXP (x, 0, i);
+if (!annul_p && INSN_FROM_TARGET_P (elt))
+mark_set_resources (elt, res, 0, mark_type);
+}
+}
return;
case POST_INC:

gcc/rtl.h

@@ -278,7 +278,7 @@ struct GTY((chain_next ("RTX_NEXT (&%h)"),
constants pool.
1 in a CALL_INSN logically equivalent to ECF_CONST and TREE_READONLY.
1 in a NOTE, or EXPR_LIST for a const call.
-1 in a JUMP_INSN, CALL_INSN, or INSN of an annulling branch. */
+1 in a JUMP_INSN of an annulling branch. */
unsigned int unchanging : 1;
/* 1 in a MEM or ASM_OPERANDS expression if the memory reference is volatile.
1 in an INSN, CALL_INSN, JUMP_INSN, CODE_LABEL, BARRIER, or NOTE
@@ -834,7 +834,7 @@ extern void rtl_check_failed_flag (const char *, const_rtx, const char *,
/* 1 if RTX is a jump_insn, call_insn, or insn that is an annulling branch. */
#define INSN_ANNULLED_BRANCH_P(RTX) \
-(RTL_FLAG_CHECK3("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN, CALL_INSN, INSN)->unchanging)
+(RTL_FLAG_CHECK1("INSN_ANNULLED_BRANCH_P", (RTX), JUMP_INSN)->unchanging)
/* 1 if RTX is an insn in a delay slot and is from the target of the branch.
If the branch insn has INSN_ANNULLED_BRANCH_P set, this insn should only be