make next/prev active_insn and active_insn_p take rtx_insn *

gcc/ChangeLog:

2016-09-22  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* emit-rtl.c (next_active_insn): Change argument type to
	rtx_insn *.
	(prev_active_insn): Likewise.
	(active_insn_p): Likewise.
	* rtl.h: Adjust prototypes.
	* cfgcleanup.c (merge_blocks_move_successor_nojumps): Adjust.
	* config/arc/arc.md: Likewise.
	* config/pa/pa.c (branch_to_delay_slot_p): Likewise.
	(branch_needs_nop_p): Likewise.
	(use_skip_p): Likewise.
	* config/sh/sh.c (gen_block_redirect): Likewise.
	(split_branches): Likewise.
	* reorg.c (optimize_skip): Likewise.
	(fill_simple_delay_slots): Likewise.
	(fill_slots_from_thread): Likewise.
	(relax_delay_slots): Likewise.
	* resource.c (mark_target_live_regs): Likewise.

From-SVN: r240361
This commit is contained in:
Trevor Saunders 2016-09-22 13:16:41 +00:00 committed by Trevor Saunders
parent 1f00691e9b
commit 7c9796eddb
9 changed files with 75 additions and 41 deletions

View File

@@ -1,3 +1,23 @@
2016-09-22 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* emit-rtl.c (next_active_insn): Change argument type to
rtx_insn *.
(prev_active_insn): Likewise.
(active_insn_p): Likewise.
* rtl.h: Adjust prototypes.
* cfgcleanup.c (merge_blocks_move_successor_nojumps): Adjust.
* config/arc/arc.md: Likewise.
* config/pa/pa.c (branch_to_delay_slot_p): Likewise.
(branch_needs_nop_p): Likewise.
(use_skip_p): Likewise.
* config/sh/sh.c (gen_block_redirect): Likewise.
(split_branches): Likewise.
* reorg.c (optimize_skip): Likewise.
(fill_simple_delay_slots): Likewise.
(fill_slots_from_thread): Likewise.
(relax_delay_slots): Likewise.
* resource.c (mark_target_live_regs): Likewise.
2016-09-22 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
* config/cris/cris.c (cris_asm_output_case_end): Change argument

View File

@@ -708,7 +708,7 @@ merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
/* If there is a jump table following block B temporarily add the jump table
to block B so that it will also be moved to the correct location. */
if (tablejump_p (BB_END (b), &label, &table)
&& prev_active_insn (label) == BB_END (b))
&& prev_active_insn (as_a<rtx_insn *> (label)) == BB_END (b))
{
BB_END (b) = table;
}

View File

@@ -5122,16 +5122,29 @@
scan = as_a <rtx_insn *> (XEXP (SET_SRC (PATTERN (scan)), 0));
continue;
}
if (JUMP_LABEL (scan)
/* JUMP_LABEL might be simple_return instead of an insn. */
&& (!INSN_P (JUMP_LABEL (scan))
|| (!next_active_insn (JUMP_LABEL (scan))
|| (recog_memoized (next_active_insn (JUMP_LABEL (scan)))
!= CODE_FOR_doloop_begin_i)))
&& (!next_active_insn (NEXT_INSN (PREV_INSN (scan)))
|| (recog_memoized
(next_active_insn (NEXT_INSN (PREV_INSN (scan))))
!= CODE_FOR_doloop_begin_i)))
rtx lab = JUMP_LABEL (scan);
if (!lab)
break;
rtx_insn *next_scan
= next_active_insn (NEXT_INSN (PREV_INSN (scan)));
if (next_scan
&& recog_memoized (next_scan) != CODE_FOR_doloop_begin_i)
break;
/* JUMP_LABEL might be simple_return instead of an insn. */
if (!INSN_P (lab))
{
n_insns++;
break;
}
rtx_insn *next_lab = next_active_insn (as_a<rtx_insn *> (lab));
if (next_lab
&& recog_memoized (next_lab) != CODE_FOR_doloop_begin_i)
break;
n_insns++;
}
break;

View File

@@ -6445,7 +6445,7 @@ branch_to_delay_slot_p (rtx_insn *insn)
if (dbr_sequence_length ())
return FALSE;
jump_insn = next_active_insn (JUMP_LABEL (insn));
jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
while (insn)
{
insn = next_active_insn (insn);
@@ -6479,7 +6479,7 @@ branch_needs_nop_p (rtx_insn *insn)
if (dbr_sequence_length ())
return FALSE;
jump_insn = next_active_insn (JUMP_LABEL (insn));
jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
while (insn)
{
insn = next_active_insn (insn);
@@ -6502,7 +6502,7 @@ branch_needs_nop_p (rtx_insn *insn)
static bool
use_skip_p (rtx_insn *insn)
{
rtx_insn *jump_insn = next_active_insn (JUMP_LABEL (insn));
rtx_insn *jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
while (insn)
{

View File

@@ -5503,7 +5503,8 @@ gen_block_redirect (rtx_insn *jump, int addr, int need_block)
else if (optimize && need_block >= 0)
{
rtx_insn *next = next_active_insn (next_active_insn (dest));
rtx_insn *next = next_active_insn (as_a<rtx_insn *> (dest));
next = next_active_insn (next);
if (next && JUMP_P (next)
&& GET_CODE (PATTERN (next)) == SET
&& recog_memoized (next) == CODE_FOR_jump_compact)
@@ -6395,9 +6396,8 @@ split_branches (rtx_insn *first)
/* We can't use JUMP_LABEL here because it might be undefined
when not optimizing. */
/* A syntax error might cause beyond to be NULL_RTX. */
beyond
= next_active_insn (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1),
0));
rtx temp = XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0);
beyond = next_active_insn (as_a<rtx_insn *> (temp));
if (beyond
&& (JUMP_P (beyond)

View File

@@ -3490,7 +3490,7 @@ last_call_insn (void)
standalone USE and CLOBBER insn. */
int
active_insn_p (const_rtx insn)
active_insn_p (const rtx_insn *insn)
{
return (CALL_P (insn) || JUMP_P (insn)
|| JUMP_TABLE_DATA_P (insn) /* FIXME */
@@ -3501,10 +3501,8 @@ active_insn_p (const_rtx insn)
}
rtx_insn *
next_active_insn (rtx uncast_insn)
next_active_insn (rtx_insn *insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@@ -3520,10 +3518,8 @@ next_active_insn (rtx uncast_insn)
standalone USE and CLOBBER insn. */
rtx_insn *
prev_active_insn (rtx uncast_insn)
prev_active_insn (rtx_insn *insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = PREV_INSN (insn);

View File

@@ -749,7 +749,7 @@ optimize_skip (rtx_jump_insn *insn, vec<rtx_insn *> *delay_list)
we have one insn followed by a branch to the same label we branch to.
In both of these cases, inverting the jump and annulling the delay
slot give the same effect in fewer insns. */
if (next_trial == next_active_insn (JUMP_LABEL (insn))
if (next_trial == next_active_insn (JUMP_LABEL_AS_INSN (insn))
|| (next_trial != 0
&& simplejump_or_return_p (next_trial)
&& JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
@@ -2198,7 +2198,7 @@ fill_simple_delay_slots (int non_jumps_p)
&& trial
&& jump_to_label_p (trial)
&& simplejump_p (trial)
&& (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
&& (next_trial = next_active_insn (JUMP_LABEL_AS_INSN (trial))) != 0
&& ! (NONJUMP_INSN_P (next_trial)
&& GET_CODE (PATTERN (next_trial)) == SEQUENCE)
&& !JUMP_P (next_trial)
@@ -2238,8 +2238,8 @@ fill_simple_delay_slots (int non_jumps_p)
&& simplejump_p (jump_insn)
&& slots_filled != slots_to_fill)
fill_slots_from_thread (jump_insn, const_true_rtx,
next_active_insn (JUMP_LABEL (insn)), NULL, 1,
1, own_thread_p (JUMP_LABEL (insn),
next_active_insn (JUMP_LABEL_AS_INSN (insn)),
NULL, 1, 1, own_thread_p (JUMP_LABEL (insn),
JUMP_LABEL (insn), 0),
slots_to_fill, &slots_filled, &delay_list);
@@ -2575,7 +2575,8 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
to call update_block and delete_insn. */
fix_reg_dead_note (prior_insn, insn);
update_reg_unused_notes (prior_insn, new_thread);
new_thread = next_active_insn (new_thread);
new_thread
= next_active_insn (as_a<rtx_insn *> (new_thread));
}
break;
}
@@ -3079,7 +3080,7 @@ delete_jump (rtx_insn *insn)
}
static rtx_insn *
label_before_next_insn (rtx x, rtx scan_limit)
label_before_next_insn (rtx_insn *x, rtx scan_limit)
{
rtx_insn *insn = next_active_insn (x);
while (insn)
@@ -3142,7 +3143,8 @@ relax_delay_slots (rtx_insn *first)
if (ANY_RETURN_P (target_label))
target_label = find_end_label (target_label);
if (target_label && next_active_insn (target_label) == next
if (target_label
&& next_active_insn (as_a<rtx_insn *> (target_label)) == next
&& ! condjump_in_parallel_p (jump_insn)
&& ! (next && switch_text_sections_between_p (jump_insn, next)))
{
@@ -3163,7 +3165,8 @@ relax_delay_slots (rtx_insn *first)
if (next && simplejump_or_return_p (next)
&& any_condjump_p (jump_insn)
&& target_label
&& next_active_insn (target_label) == next_active_insn (next)
&& (next_active_insn (as_a<rtx_insn *> (target_label))
== next_active_insn (next))
&& no_labels_between_p (jump_insn, next)
&& targetm.can_follow_jump (jump_insn, next))
{
@@ -3318,7 +3321,7 @@ relax_delay_slots (rtx_insn *first)
{
/* Figure out where to emit the special USE insn so we don't
later incorrectly compute register live/death info. */
rtx_insn *tmp = next_active_insn (trial);
rtx_insn *tmp = next_active_insn (as_a<rtx_insn *> (trial));
if (tmp == 0)
tmp = find_end_label (simple_return_rtx);
@@ -3366,7 +3369,7 @@ relax_delay_slots (rtx_insn *first)
/* See if we have a simple (conditional) jump that is useless. */
if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
&& ! condjump_in_parallel_p (delay_jump_insn)
&& prev_active_insn (target_label) == insn
&& prev_active_insn (as_a<rtx_insn *> (target_label)) == insn
&& ! BARRIER_P (prev_nonnote_insn (as_a<rtx_insn *> (target_label)))
/* If the last insn in the delay slot sets CC0 for some insn,
various code assumes that it is in a delay slot. We could
@@ -3429,7 +3432,8 @@ relax_delay_slots (rtx_insn *first)
if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
&& any_condjump_p (delay_jump_insn)
&& next && simplejump_or_return_p (next)
&& next_active_insn (target_label) == next_active_insn (next)
&& (next_active_insn (as_a<rtx_insn *> (target_label))
== next_active_insn (next))
&& no_labels_between_p (insn, next))
{
rtx label = JUMP_LABEL (next);
@@ -3480,7 +3484,8 @@ relax_delay_slots (rtx_insn *first)
try_merge_delay_insns (insn, next);
else if (! INSN_FROM_TARGET_P (pat->insn (1))
&& own_thread_p (target_label, target_label, 0))
try_merge_delay_insns (insn, next_active_insn (target_label));
try_merge_delay_insns (insn,
next_active_insn (as_a<rtx_insn *> (target_label)));
/* If we get here, we haven't deleted INSN. But we may have deleted
NEXT, so recompute it. */

View File

@@ -1122,7 +1122,7 @@ mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resource
rtx_insn *stop_insn = next_active_insn (jump_insn);
if (!ANY_RETURN_P (jump_target))
jump_target = next_active_insn (jump_target);
jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
mark_target_live_regs (insns, jump_target, &new_resources);
CLEAR_RESOURCE (&set);
CLEAR_RESOURCE (&needed);

View File

@@ -2844,9 +2844,9 @@ extern rtx_insn *prev_nonnote_nondebug_insn (rtx_insn *);
extern rtx_insn *next_nonnote_nondebug_insn (rtx_insn *);
extern rtx_insn *prev_real_insn (rtx_insn *);
extern rtx_insn *next_real_insn (rtx);
extern rtx_insn *prev_active_insn (rtx);
extern rtx_insn *next_active_insn (rtx);
extern int active_insn_p (const_rtx);
extern rtx_insn *prev_active_insn (rtx_insn *);
extern rtx_insn *next_active_insn (rtx_insn *);
extern int active_insn_p (const rtx_insn *);
extern rtx_insn *next_cc0_user (rtx);
extern rtx_insn *prev_cc0_setter (rtx_insn *);