Split update_cfg_for_uncondjump out of combine

Later patches want to reuse combine's update_cfg_for_uncondjump,
so this patch makes it a public cfgrtl.c function.

gcc/
	* cfgrtl.h (update_cfg_for_uncondjump): Declare.
	* combine.c (update_cfg_for_uncondjump): Move to...
	* cfgrtl.c: ...here.
This commit is contained in:
Richard Sandiford 2020-12-17 00:15:03 +00:00
parent d4b520d88e
commit 21335c4857
3 changed files with 48 additions and 36 deletions

View File

@@ -3419,6 +3419,53 @@ fixup_abnormal_edges (void)
return inserted;
}
/* Delete the unconditional jump INSN and adjust the CFG correspondingly.
   Note that the INSN should be deleted *after* removing dead edges, so
   that the kept edge is the fallthrough edge for a (set (pc) (pc))
   but not for a (set (pc) (label_ref FOO)).  */
void
update_cfg_for_uncondjump (rtx_insn *insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  /* The jump must be the last insn of its block.  */
  gcc_assert (BB_END (bb) == insn);

  /* Remove edges made dead by the jump first, so that the surviving
     edge (if any) is the fall-through one.  */
  purge_dead_edges (bb);

  if (current_ir_type () != IR_RTL_CFGLAYOUT)
    {
      /* Outside cfglayout mode, barriers live in the insn stream:
	 if the block no longer falls through, make sure a barrier
	 follows INSN.  */
      if (!find_fallthru_edge (bb->succs))
	{
	  auto barrier = next_nonnote_nondebug_insn (insn);
	  if (!barrier || !BARRIER_P (barrier))
	    emit_barrier_after (insn);
	}
      return;
    }

  /* cfglayout mode: drop the jump and, if exactly one successor is
     left, turn its edge into a fall-through edge.  */
  delete_insn (insn);
  if (EDGE_COUNT (bb->succs) == 1)
    {
      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;

      /* Remove barriers from the footer if there are any.  */
      for (rtx_insn *footer = BB_FOOTER (bb); footer;
	   footer = NEXT_INSN (footer))
	if (BARRIER_P (footer))
	  {
	    /* Splice the barrier out of the doubly-linked footer chain.  */
	    if (PREV_INSN (footer))
	      SET_NEXT_INSN (PREV_INSN (footer)) = NEXT_INSN (footer);
	    else
	      BB_FOOTER (bb) = NEXT_INSN (footer);
	    if (NEXT_INSN (footer))
	      SET_PREV_INSN (NEXT_INSN (footer)) = PREV_INSN (footer);
	  }
	else if (LABEL_P (footer))
	  break;
    }
}
/* Cut the insns from FIRST to LAST out of the insns stream. */
rtx_insn *

View File

@@ -47,6 +47,7 @@ extern void fixup_partitions (void);
extern bool purge_dead_edges (basic_block);
extern bool purge_all_dead_edges (void);
extern bool fixup_abnormal_edges (void);
extern void update_cfg_for_uncondjump (rtx_insn *);
extern rtx_insn *unlink_insn_chain (rtx_insn *, rtx_insn *);
extern void relink_block_chain (bool);
extern rtx_insn *duplicate_insn_chain (rtx_insn *, rtx_insn *,

View File

@@ -2531,42 +2531,6 @@ reg_subword_p (rtx x, rtx reg)
&& GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
}
/* Delete the unconditional jump INSN and adjust the CFG correspondingly.
Note that the INSN should be deleted *after* removing dead edges, so
that the kept edge is the fallthrough edge for a (set (pc) (pc))
but not for a (set (pc) (label_ref FOO)). */
static void
update_cfg_for_uncondjump (rtx_insn *insn)
{
basic_block bb = BLOCK_FOR_INSN (insn);
/* INSN must be the final insn of its block.  */
gcc_assert (BB_END (bb) == insn);
/* Drop the edges the jump made dead before deleting it, so that the
kept successor edge (if any) is the fall-through one; see the header
comment above.  */
purge_dead_edges (bb);
delete_insn (insn);
if (EDGE_COUNT (bb->succs) == 1)
{
/* NOTE(review): this local shadows the INSN parameter, whose insn was
already deleted above; below, it walks the block's footer chain.  */
rtx_insn *insn;
/* The single remaining successor is now reached by falling through.  */
single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
/* Remove barriers from the footer if there are any. */
for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
if (BARRIER_P (insn))
{
/* Splice the barrier out of the doubly-linked footer chain,
updating BB_FOOTER when the barrier is the chain head.  */
if (PREV_INSN (insn))
SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
else
BB_FOOTER (bb) = NEXT_INSN (insn);
if (NEXT_INSN (insn))
SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
}
else if (LABEL_P (insn))
/* Stop at the first label: barriers past it belong elsewhere.  */
break;
}
}
/* Return whether PAT is a PARALLEL of exactly N register SETs followed
by an arbitrary number of CLOBBERs. */
static bool