basic-block.h (BLOCK_HEAD, BLOCK_END): Remove.

2003-12-11  Steven Bosscher  <steven@gcc.gnu.org>

	* basic-block.h (BLOCK_HEAD, BLOCK_END): Remove.
	(BLOCK_HEAD_TREE, BLOCK_END_TREE): Remove.
	(basic_block_def): Rename `head' to `head_' and `end' to `end_'.
	(BB_HEAD, BB_END): New accessor macros for the `head_' and `end_'
	fields of a basic block.
	* bb-reorder.c, bt-load.c, caller-save.c, cfg.c, cfganal.c,
	cfgbuild.c, cfgcleanup.c, cfglayout.c, cfgloop.c, cfgloopanal.c,
	cfgloopmanip.c, cfgrtl.c, combine.c, conflict.c, df.c, emit-rtl.c,
	final.c, flow.c, function.c, gcse.c, global.c, graph.c,
	haifa-sched.c, ifcvt.c, lcm.c, local-alloc.c, loop-unswitch.c,
	loop.c, postreload.c, predict.c, profile.c, ra-build.c, ra-debug.c,
	ra-rewrite.c, ra.c, recog.c, reg-stack.c, regclass.c, regmove.c,
	regrename.c, reload1.c, resource.c, sched-ebb.c, sched-rgn.c,
	sibcall.c, tracer.c, config/frv/frv.c, config/i386/i386.c,
	config/ia64/ia64.c: Use the BB_HEAD and BB_END macros instead of
	accessing the `head' and `end' fields of a basic block directly.

	* gengtype.c: Add missing piece from earlier patch.  Dunno what
	I was thinking...

From-SVN: r74520
Steven Bosscher  2003-12-11 00:20:51 +00:00
parent 2086c099f6, commit a813c11120
52 changed files with 662 additions and 633 deletions
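The bulk of the change is mechanical: the `head' and `end' fields of struct basic_block_def become `head_' and `end_', and every direct field access is rewritten to go through the new BB_HEAD and BB_END macros, which work as both rvalues and lvalues. Below is a minimal self-contained sketch of the scheme; the stand-in declarations and the two helper functions are illustrative only, not the real GCC declarations.

/* Stand-in declarations; the real ones live in rtl.h and basic-block.h.  */
typedef struct rtx_def *rtx;

typedef struct basic_block_def {
  rtx head_, end_;              /* first and last insns of the block */
} *basic_block;

/* The accessor macros as added by this patch.  */
#define BB_HEAD(B) (B)->head_
#define BB_END(B)  (B)->end_

/* Hypothetical callers showing the before/after pattern.  */
static rtx
first_insn_of (basic_block bb)
{
  return BB_HEAD (bb);          /* was `return bb->head;'  */
}

static void
set_last_insn_of (basic_block bb, rtx insn)
{
  BB_END (bb) = insn;           /* was `bb->end = insn;'  */
}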

gcc/ChangeLog

@ -1,3 +1,25 @@
2003-12-11 Steven Bosscher <steven@gcc.gnu.org>
* basic-block.h (BLOCK_HEAD, BLOCK_END): Remove.
(BLOCK_HEAD_TREE, BLOCK_END_TREE): Remove.
(basic_block_def): Rename `head' to `head_' and `end' to `end_'.
(BB_HEAD, BB_END): New accessor macros for the `head_' and `end_'
fields of a basic block.
* bb-reorder.c, bt-load.c, caller-save.c, cfg.c, cfganal.c,
cfgbuild.c, cfgcleanup.c, cfglayout.c, cfgloop.c, cfgloopanal.c,
cfgloopmanip.c, cfgrtl.c, combine.c, conflict.c, df.c, emit-rtl.c,
final.c, flow.c, function.c, gcse.c, global.c, graph.c,
haifa-sched.c, ifcvt.c, lcm.c, local-alloc.c, loop-unswitch.c,
loop.c, postreload.c, predict.c, profile.c, ra-build.c, ra-debug.c,
ra-rewrite.c, ra.c, recog.c, reg-stack.c, regclass.c, regmove.c,
regrename.c, reload1.c, resource.c, sched-ebb.c, sched-rgn.c,
sibcall.c, tracer.c, config/frv/frv.c, config/i386/i386.c,
config/ia64/ia64.c: Use the BB_HEAD and BB_END macros instead of
accessing the `head' and `end' fields of a basic block directly.
* gengtype.c: Teach about "void**" pointers and "void *" function
types. Fixes earlier commit.
2003-12-10 Geoffrey Keating <geoffk@apple.com>
* doc/extend.texi (Vector Extensions): Document that bitwise

gcc/basic-block.h

@ -193,7 +193,7 @@ struct loops;
/* Basic block information indexed by block number. */
typedef struct basic_block_def {
/* The first and last insns of the block. */
rtx head, end;
rtx head_, end_;
/* The first and last trees of the block. */
tree head_tree;
@ -316,11 +316,8 @@ extern struct obstack flow_obstack;
/* Stuff for recording basic block info. */
#define BLOCK_HEAD(B) (BASIC_BLOCK (B)->head)
#define BLOCK_END(B) (BASIC_BLOCK (B)->end)
#define BLOCK_HEAD_TREE(B) (BASIC_BLOCK (B)->head_tree)
#define BLOCK_END_TREE(B) (BASIC_BLOCK (B)->end_tree)
#define BB_HEAD(B) (B)->head_
#define BB_END(B) (B)->end_
/* Special block numbers [markers] for entry and exit. */
#define ENTRY_BLOCK (-1)
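Note the difference in interface: the removed BLOCK_HEAD and BLOCK_END macros took a block number and went through BASIC_BLOCK, whereas BB_HEAD and BB_END take a basic_block pointer. Callers that only hold an index, such as the caller-save.c hunks below, therefore become BB_HEAD (BASIC_BLOCK (...)). A small illustrative fragment follows, assuming GCC's BASIC_BLOCK lookup macro and an int block index I; the helper function name is made up for the example.

static rtx
first_insn_of_block_number (int i)
{
  /* Old, index-based form (removed by this patch):
       return BLOCK_HEAD (i);        i.e. BASIC_BLOCK (i)->head  */
  basic_block bb = BASIC_BLOCK (i);  /* look the block up once */
  return BB_HEAD (bb);               /* or BB_HEAD (BASIC_BLOCK (i)) */
}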

gcc/bb-reorder.c
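Most of the converted loops, starting with the copy_bb_p hunk below and recurring in bt-load.c and many later files, use the same insn-walking idiom: start at the block head and stop after visiting the block end. A generic sketch of the pattern, assuming a basic_block BB and the usual rtl.h macros (NEXT_INSN, INSN_P); the loop body is a placeholder.

rtx insn;

/* Visit every insn of BB, including BB_END (bb) itself.  */
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
     insn = NEXT_INSN (insn))
  if (INSN_P (insn))
    {
      /* ... process INSN here ...  */
    }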

@ -312,7 +312,7 @@ rotate_loop (edge back_edge, struct trace *trace, int trace_n)
/* Duplicate HEADER if it is a small block containing cond jump
in the end. */
if (any_condjump_p (header->end) && copy_bb_p (header, 0))
if (any_condjump_p (BB_END (header)) && copy_bb_p (header, 0))
{
copy_bb (header, prev_bb->succ, prev_bb, trace_n);
}
@ -1011,7 +1011,7 @@ copy_bb_p (basic_block bb, int code_may_grow)
if (code_may_grow && maybe_hot_bb_p (bb))
max_size *= 8;
for (insn = bb->head; insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
if (INSN_P (insn))

gcc/bt-load.c

@ -477,7 +477,7 @@ compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
&& REGNO_REG_SET_P (bb->global_live_at_start, reg))
SET_HARD_REG_BIT (info.btrs_live_in_block, reg);
for (insn = bb->head, last = NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
insn != last;
insn = NEXT_INSN (insn), insn_luid++)
{
@ -629,7 +629,7 @@ link_btr_uses (btr_def *def_array, btr_user *use_array, sbitmap *bb_out,
rtx last;
sbitmap_union_of_preds (reaching_defs, bb_out, i);
for (insn = bb->head, last = NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
insn != last;
insn = NEXT_INSN (insn))
{
@ -1058,7 +1058,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
Replace all uses of the old target register definition by
uses of the new definition. Delete the old definition. */
basic_block b = new_def_bb;
rtx insp = b->head;
rtx insp = BB_HEAD (b);
rtx old_insn = def->insn;
rtx src;
rtx btr_rtx;
@ -1131,7 +1131,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
static int
can_move_up (basic_block bb, rtx insn, int n_insns)
{
while (insn != bb->head && n_insns > 0)
while (insn != BB_HEAD (bb) && n_insns > 0)
{
insn = PREV_INSN (insn);
/* ??? What if we have an anti-dependency that actually prevents the

gcc/caller-save.c

@ -832,8 +832,8 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
}
}
CLEAR_REG_SET (&new->dead_or_set);
if (chain->insn == BLOCK_HEAD (chain->block))
BLOCK_HEAD (chain->block) = new->insn;
if (chain->insn == BB_HEAD (BASIC_BLOCK (chain->block)))
BB_HEAD (BASIC_BLOCK (chain->block)) = new->insn;
}
else
{
@ -852,8 +852,8 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
note_stores (PATTERN (chain->insn), add_stored_regs,
&new->live_throughout);
CLEAR_REG_SET (&new->dead_or_set);
if (chain->insn == BLOCK_END (chain->block))
BLOCK_END (chain->block) = new->insn;
if (chain->insn == BB_END (BASIC_BLOCK (chain->block)))
BB_END (BASIC_BLOCK (chain->block)) = new->insn;
}
new->block = chain->block;
new->is_caller_save_insn = 1;

gcc/cfg.c

@ -543,7 +543,7 @@ dump_flow_info (FILE *file)
gcov_type lsum;
fprintf (file, "\nBasic block %d: first insn %d, last %d, ",
bb->index, INSN_UID (bb->head), INSN_UID (bb->end));
bb->index, INSN_UID (BB_HEAD (bb)), INSN_UID (BB_END (bb)));
fprintf (file, "prev %d, next %d, ",
bb->prev_bb->index, bb->next_bb->index);
fprintf (file, "loop_depth %d, count ", bb->loop_depth);

gcc/cfganal.c

@ -89,7 +89,7 @@ forwarder_block_p (basic_block bb)
|| !bb->succ || bb->succ->succ_next)
return false;
for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
if (INSN_P (insn) && flow_active_insn_p (insn))
return false;
@ -103,8 +103,8 @@ forwarder_block_p (basic_block bb)
bool
can_fallthru (basic_block src, basic_block target)
{
rtx insn = src->end;
rtx insn2 = target == EXIT_BLOCK_PTR ? NULL : target->head;
rtx insn = BB_END (src);
rtx insn2 = target == EXIT_BLOCK_PTR ? NULL : BB_HEAD (target);
if (src->next_bb != target)
return 0;
@ -232,11 +232,11 @@ set_edge_can_fallthru_flag (void)
CAN_FALLTHRU edges. */
if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
continue;
if (!any_condjump_p (bb->end))
if (!any_condjump_p (BB_END (bb)))
continue;
if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
if (!invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0))
continue;
invert_jump (bb->end, JUMP_LABEL (bb->end), 0);
invert_jump (BB_END (bb), JUMP_LABEL (BB_END (bb)), 0);
bb->succ->flags |= EDGE_CAN_FALLTHRU;
bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
}
@ -305,10 +305,10 @@ flow_call_edges_add (sbitmap blocks)
if (check_last_block)
{
basic_block bb = EXIT_BLOCK_PTR->prev_bb;
rtx insn = bb->end;
rtx insn = BB_END (bb);
/* Back up past insns that must be kept in the same block as a call. */
while (insn != bb->head
while (insn != BB_HEAD (bb)
&& keep_with_call_p (insn))
insn = PREV_INSN (insn);
@ -342,7 +342,7 @@ flow_call_edges_add (sbitmap blocks)
if (blocks && !TEST_BIT (blocks, i))
continue;
for (insn = bb->end; ; insn = prev_insn)
for (insn = BB_END (bb); ; insn = prev_insn)
{
prev_insn = PREV_INSN (insn);
if (need_fake_edge_p (insn))
@ -353,7 +353,7 @@ flow_call_edges_add (sbitmap blocks)
/* Don't split the block between a call and an insn that should
remain in the same block as the call. */
if (GET_CODE (insn) == CALL_INSN)
while (split_at_insn != bb->end
while (split_at_insn != BB_END (bb)
&& keep_with_call_p (NEXT_INSN (split_at_insn)))
split_at_insn = NEXT_INSN (split_at_insn);
@ -363,7 +363,7 @@ flow_call_edges_add (sbitmap blocks)
cause us to mark that edge as fake and remove it later. */
#ifdef ENABLE_CHECKING
if (split_at_insn == bb->end)
if (split_at_insn == BB_END (bb))
for (e = bb->succ; e; e = e->succ_next)
if (e->dest == EXIT_BLOCK_PTR)
abort ();
@ -371,7 +371,7 @@ flow_call_edges_add (sbitmap blocks)
/* Note that the following may create a new basic block
and renumber the existing basic blocks. */
if (split_at_insn != bb->end)
if (split_at_insn != BB_END (bb))
{
e = split_block (bb, split_at_insn);
if (e)
@ -381,7 +381,7 @@ flow_call_edges_add (sbitmap blocks)
make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
if (insn == bb->head)
if (insn == BB_HEAD (bb))
break;
}
}

gcc/cfgbuild.c

@ -303,13 +303,14 @@ make_edges (rtx label_value_list, basic_block min, basic_block max, int update_p
enum rtx_code code;
int force_fallthru = 0;
if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALT_ENTRY_P (bb->head))
if (GET_CODE (BB_HEAD (bb)) == CODE_LABEL
&& LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
/* Examine the last instruction of the block, and discover the
ways we can leave the block. */
insn = bb->end;
insn = BB_END (bb);
code = GET_CODE (insn);
/* A branch. */
@ -432,7 +433,7 @@ make_edges (rtx label_value_list, basic_block min, basic_block max, int update_p
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
else if (bb->next_bb != EXIT_BLOCK_PTR)
{
if (force_fallthru || insn == bb->next_bb->head)
if (force_fallthru || insn == BB_HEAD (bb->next_bb))
cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
}
}
@ -649,12 +650,12 @@ enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};
static void
find_bb_boundaries (basic_block bb)
{
rtx insn = bb->head;
rtx end = bb->end;
rtx insn = BB_HEAD (bb);
rtx end = BB_END (bb);
rtx flow_transfer_insn = NULL_RTX;
edge fallthru = NULL;
if (insn == bb->end)
if (insn == BB_END (bb))
return;
if (GET_CODE (insn) == CODE_LABEL)
@ -670,7 +671,7 @@ find_bb_boundaries (basic_block bb)
{
fallthru = split_block (bb, PREV_INSN (insn));
if (flow_transfer_insn)
bb->end = flow_transfer_insn;
BB_END (bb) = flow_transfer_insn;
bb = fallthru->dest;
remove_edge (fallthru);
@ -684,7 +685,7 @@ find_bb_boundaries (basic_block bb)
if (flow_transfer_insn && inside_basic_block_p (insn))
{
fallthru = split_block (bb, PREV_INSN (insn));
bb->end = flow_transfer_insn;
BB_END (bb) = flow_transfer_insn;
bb = fallthru->dest;
remove_edge (fallthru);
flow_transfer_insn = NULL_RTX;
@ -701,7 +702,7 @@ find_bb_boundaries (basic_block bb)
return and barrier, or possibly other sequence not behaving like
ordinary jump, we need to take care and move basic block boundary. */
if (flow_transfer_insn)
bb->end = flow_transfer_insn;
BB_END (bb) = flow_transfer_insn;
/* We've possibly replaced the conditional jump by conditional jump
followed by cleanup at fallthru edge, so the outgoing edges may
@ -719,7 +720,7 @@ compute_outgoing_frequencies (basic_block b)
if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
{
rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
int probability;
if (!note)

gcc/cfgcleanup.c

@ -129,7 +129,7 @@ try_simplify_condjump (basic_block cbranch_block)
/* Verify that we've got a normal conditional branch at the end
of the block. */
cbranch_insn = cbranch_block->end;
cbranch_insn = BB_END (cbranch_block);
if (!any_condjump_p (cbranch_insn))
return false;
@ -159,7 +159,7 @@ try_simplify_condjump (basic_block cbranch_block)
if (rtl_dump_file)
fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
INSN_UID (cbranch_insn), INSN_UID (jump_block->end));
INSN_UID (cbranch_insn), INSN_UID (BB_END (jump_block)));
/* Success. Update the CFG to match. Note that after this point
the edge variable names appear backwards; the redirection is done
@ -172,19 +172,19 @@ try_simplify_condjump (basic_block cbranch_block)
cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
update_br_prob_note (cbranch_block);
end = jump_block->end;
end = BB_END (jump_block);
/* Deleting a block may produce unreachable code warning even when we are
not deleting anything live. Supress it by moving all the line number
notes out of the block. */
for (insn = jump_block->head; insn != NEXT_INSN (jump_block->end);
for (insn = BB_HEAD (jump_block); insn != NEXT_INSN (BB_END (jump_block));
insn = next)
{
next = NEXT_INSN (insn);
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
{
if (insn == jump_block->end)
if (insn == BB_END (jump_block))
{
jump_block->end = PREV_INSN (insn);
BB_END (jump_block) = PREV_INSN (insn);
if (insn == end)
break;
}
@ -300,17 +300,17 @@ thread_jump (int mode, edge e, basic_block b)
}
/* Second branch must end with onlyjump, as we will eliminate the jump. */
if (!any_condjump_p (e->src->end))
if (!any_condjump_p (BB_END (e->src)))
return NULL;
if (!any_condjump_p (b->end) || !onlyjump_p (b->end))
if (!any_condjump_p (BB_END (b)) || !onlyjump_p (BB_END (b)))
{
BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);
return NULL;
}
set1 = pc_set (e->src->end);
set2 = pc_set (b->end);
set1 = pc_set (BB_END (e->src));
set2 = pc_set (BB_END (b));
if (((e->flags & EDGE_FALLTHRU) != 0)
!= (XEXP (SET_SRC (set1), 1) == pc_rtx))
reverse1 = true;
@ -318,12 +318,12 @@ thread_jump (int mode, edge e, basic_block b)
cond1 = XEXP (SET_SRC (set1), 0);
cond2 = XEXP (SET_SRC (set2), 0);
if (reverse1)
code1 = reversed_comparison_code (cond1, e->src->end);
code1 = reversed_comparison_code (cond1, BB_END (e->src));
else
code1 = GET_CODE (cond1);
code2 = GET_CODE (cond2);
reversed_code2 = reversed_comparison_code (cond2, b->end);
reversed_code2 = reversed_comparison_code (cond2, BB_END (b));
if (!comparison_dominates_p (code1, code2)
&& !comparison_dominates_p (code1, reversed_code2))
@ -339,7 +339,7 @@ thread_jump (int mode, edge e, basic_block b)
/* Short circuit cases where block B contains some side effects, as we can't
safely bypass it. */
for (insn = NEXT_INSN (b->head); insn != NEXT_INSN (b->end);
for (insn = NEXT_INSN (BB_HEAD (b)); insn != NEXT_INSN (BB_END (b));
insn = NEXT_INSN (insn))
if (INSN_P (insn) && side_effects_p (PATTERN (insn)))
{
@ -350,7 +350,7 @@ thread_jump (int mode, edge e, basic_block b)
cselib_init ();
/* First process all values computed in the source basic block. */
for (insn = NEXT_INSN (e->src->head); insn != NEXT_INSN (e->src->end);
for (insn = NEXT_INSN (BB_HEAD (e->src)); insn != NEXT_INSN (BB_END (e->src));
insn = NEXT_INSN (insn))
if (INSN_P (insn))
cselib_process_insn (insn);
@ -362,7 +362,7 @@ thread_jump (int mode, edge e, basic_block b)
processing as if it were same basic block.
Our goal is to prove that whole block is an NOOP. */
for (insn = NEXT_INSN (b->head); insn != NEXT_INSN (b->end) && !failed;
for (insn = NEXT_INSN (BB_HEAD (b)); insn != NEXT_INSN (BB_END (b)) && !failed;
insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
@ -509,7 +509,7 @@ try_forward_edges (int mode, basic_block b)
if ((mode & CLEANUP_PRE_LOOP) && optimize)
{
rtx insn = (target->succ->flags & EDGE_FALLTHRU
? target->head : prev_nonnote_insn (target->end));
? BB_HEAD (target) : prev_nonnote_insn (BB_END (target)));
if (GET_CODE (insn) != NOTE)
insn = NEXT_INSN (insn);
@ -527,7 +527,7 @@ try_forward_edges (int mode, basic_block b)
at this time; it can mess up the loop optimizer's
recognition of some patterns. */
insn = PREV_INSN (target->head);
insn = PREV_INSN (BB_HEAD (target));
if (insn && GET_CODE (insn) == NOTE
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
break;
@ -696,7 +696,7 @@ merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
{
rtx barrier;
barrier = next_nonnote_insn (a->end);
barrier = next_nonnote_insn (BB_END (a));
if (GET_CODE (barrier) != BARRIER)
abort ();
delete_insn (barrier);
@ -708,12 +708,12 @@ merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
and adjust the block trees appropriately. Even better would be to have
a tighter connection between block trees and rtl so that this is not
necessary. */
if (squeeze_notes (&a->head, &a->end))
if (squeeze_notes (&BB_HEAD (a), &BB_END (a)))
abort ();
/* Scramble the insn chain. */
if (a->end != PREV_INSN (b->head))
reorder_insns_nobb (a->head, a->end, PREV_INSN (b->head));
if (BB_END (a) != PREV_INSN (BB_HEAD (b)))
reorder_insns_nobb (BB_HEAD (a), BB_END (a), PREV_INSN (BB_HEAD (b)));
a->flags |= BB_DIRTY;
if (rtl_dump_file)
@ -739,18 +739,18 @@ merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
rtx barrier, real_b_end;
rtx label, table;
real_b_end = b->end;
real_b_end = BB_END (b);
/* If there is a jump table following block B temporarily add the jump table
to block B so that it will also be moved to the correct location. */
if (tablejump_p (b->end, &label, &table)
&& prev_active_insn (label) == b->end)
if (tablejump_p (BB_END (b), &label, &table)
&& prev_active_insn (label) == BB_END (b))
{
b->end = table;
BB_END (b) = table;
}
/* There had better have been a barrier there. Delete it. */
barrier = NEXT_INSN (b->end);
barrier = NEXT_INSN (BB_END (b));
if (barrier && GET_CODE (barrier) == BARRIER)
delete_insn (barrier);
@ -761,14 +761,14 @@ merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
and adjust the block trees appropriately. Even better would be to have
a tighter connection between block trees and rtl so that this is not
necessary. */
if (squeeze_notes (&b->head, &b->end))
if (squeeze_notes (&BB_HEAD (b), &BB_END (b)))
abort ();
/* Scramble the insn chain. */
reorder_insns_nobb (b->head, b->end, a->end);
reorder_insns_nobb (BB_HEAD (b), BB_END (b), BB_END (a));
/* Restore the real end of b. */
b->end = real_b_end;
BB_END (b) = real_b_end;
if (rtl_dump_file)
fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
@ -799,8 +799,8 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
that would make optimize_sibling_and_tail_recursive_calls more
complex for no gain. */
if ((mode & CLEANUP_PRE_SIBCALL)
&& GET_CODE (c->head) == CODE_LABEL
&& tail_recursion_label_p (c->head))
&& GET_CODE (BB_HEAD (c)) == CODE_LABEL
&& tail_recursion_label_p (BB_HEAD (c)))
return NULL;
/* If B has a fallthru edge to C, no need to move anything. */
@ -1010,7 +1010,7 @@ flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
/* Skip simple jumps at the end of the blocks. Complex jumps still
need to be compared for equivalence, which we'll do below. */
i1 = bb1->end;
i1 = BB_END (bb1);
last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
if (onlyjump_p (i1)
|| (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
@ -1019,7 +1019,7 @@ flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
i1 = PREV_INSN (i1);
}
i2 = bb2->end;
i2 = BB_END (bb2);
if (onlyjump_p (i2)
|| (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
{
@ -1033,13 +1033,13 @@ flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
while (true)
{
/* Ignore notes. */
while (!INSN_P (i1) && i1 != bb1->head)
while (!INSN_P (i1) && i1 != BB_HEAD (bb1))
i1 = PREV_INSN (i1);
while (!INSN_P (i2) && i2 != bb2->head)
while (!INSN_P (i2) && i2 != BB_HEAD (bb2))
i2 = PREV_INSN (i2);
if (i1 == bb1->head || i2 == bb2->head)
if (i1 == BB_HEAD (bb1) || i2 == BB_HEAD (bb2))
break;
if (!insns_match_p (mode, i1, i2))
@ -1085,16 +1085,16 @@ flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
Two, it keeps line number notes as matched as may be. */
if (ninsns)
{
while (last1 != bb1->head && !INSN_P (PREV_INSN (last1)))
while (last1 != BB_HEAD (bb1) && !INSN_P (PREV_INSN (last1)))
last1 = PREV_INSN (last1);
if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
if (last1 != BB_HEAD (bb1) && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
last1 = PREV_INSN (last1);
while (last2 != bb2->head && !INSN_P (PREV_INSN (last2)))
while (last2 != BB_HEAD (bb2) && !INSN_P (PREV_INSN (last2)))
last2 = PREV_INSN (last2);
if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
if (last2 != BB_HEAD (bb2) && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
last2 = PREV_INSN (last2);
*f1 = last1;
@ -1121,18 +1121,18 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
unconditional jump, or a fake edge to exit. */
if (bb1->succ && !bb1->succ->succ_next
&& (bb1->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
&& (GET_CODE (bb1->end) != JUMP_INSN || simplejump_p (bb1->end)))
&& (GET_CODE (BB_END (bb1)) != JUMP_INSN || simplejump_p (BB_END (bb1))))
return (bb2->succ && !bb2->succ->succ_next
&& (bb2->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
&& (GET_CODE (bb2->end) != JUMP_INSN || simplejump_p (bb2->end)));
&& (GET_CODE (BB_END (bb2)) != JUMP_INSN || simplejump_p (BB_END (bb2))));
/* Match conditional jumps - this may get tricky when fallthru and branch
edges are crossed. */
if (bb1->succ
&& bb1->succ->succ_next
&& !bb1->succ->succ_next->succ_next
&& any_condjump_p (bb1->end)
&& onlyjump_p (bb1->end))
&& any_condjump_p (BB_END (bb1))
&& onlyjump_p (BB_END (bb1)))
{
edge b1, f1, b2, f2;
bool reverse, match;
@ -1142,8 +1142,8 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
if (!bb2->succ
|| !bb2->succ->succ_next
|| bb2->succ->succ_next->succ_next
|| !any_condjump_p (bb2->end)
|| !onlyjump_p (bb2->end))
|| !any_condjump_p (BB_END (bb2))
|| !onlyjump_p (BB_END (bb2)))
return false;
b1 = BRANCH_EDGE (bb1);
@ -1175,8 +1175,8 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
else
return false;
set1 = pc_set (bb1->end);
set2 = pc_set (bb2->end);
set1 = pc_set (BB_END (bb1));
set2 = pc_set (BB_END (bb2));
if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
!= (XEXP (SET_SRC (set2), 1) == pc_rtx))
reverse = !reverse;
@ -1185,7 +1185,7 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
cond2 = XEXP (SET_SRC (set2), 0);
code1 = GET_CODE (cond1);
if (reverse)
code2 = reversed_comparison_code (cond2, bb2->end);
code2 = reversed_comparison_code (cond2, BB_END (bb2));
else
code2 = GET_CODE (cond2);
@ -1250,8 +1250,8 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
rtx label1, label2;
rtx table1, table2;
if (tablejump_p (bb1->end, &label1, &table1)
&& tablejump_p (bb2->end, &label2, &table2)
if (tablejump_p (BB_END (bb1), &label1, &table1)
&& tablejump_p (BB_END (bb2), &label2, &table2)
&& GET_CODE (PATTERN (table1)) == GET_CODE (PATTERN (table2)))
{
/* The labels should never be the same rtx. If they really are same
@ -1261,7 +1261,7 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
/* If LABEL2 is referenced in BB1->END do not do anything
because we would lose information when replacing
LABEL1 by LABEL2 and then LABEL2 by LABEL1 in BB1->END. */
if (label1 != label2 && !rtx_referenced_p (label2, bb1->end))
if (label1 != label2 && !rtx_referenced_p (label2, BB_END (bb1)))
{
/* Set IDENTICAL to true when the tables are identical. */
bool identical = false;
@ -1296,9 +1296,9 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
rr.r1 = label1;
rr.r2 = label2;
rr.update_label_nuses = false;
for_each_rtx (&bb1->end, replace_label, &rr);
for_each_rtx (&BB_END (bb1), replace_label, &rr);
match = insns_match_p (mode, bb1->end, bb2->end);
match = insns_match_p (mode, BB_END (bb1), BB_END (bb2));
if (rtl_dump_file && match)
fprintf (rtl_dump_file,
"Tablejumps in bb %i and %i match.\n",
@ -1309,7 +1309,7 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
from the instruction is deleted too. */
rr.r1 = label2;
rr.r2 = label1;
for_each_rtx (&bb1->end, replace_label, &rr);
for_each_rtx (&BB_END (bb1), replace_label, &rr);
return match;
}
@ -1321,7 +1321,7 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
/* First ensure that the instructions match. There may be many outgoing
edges so this test is generally cheaper. */
if (!insns_match_p (mode, bb1->end, bb2->end))
if (!insns_match_p (mode, BB_END (bb1), BB_END (bb2)))
return false;
/* Search the outgoing edges, ensure that the counts do match, find possible
@ -1362,8 +1362,8 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
/* Ensure the same EH region. */
{
rtx n1 = find_reg_note (bb1->end, REG_EH_REGION, 0);
rtx n2 = find_reg_note (bb2->end, REG_EH_REGION, 0);
rtx n1 = find_reg_note (BB_END (bb1), REG_EH_REGION, 0);
rtx n2 = find_reg_note (BB_END (bb2), REG_EH_REGION, 0);
if (!n1 && n2)
return false;
@ -1443,8 +1443,8 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
rtx label1, label2;
rtx table1, table2;
if (tablejump_p (src1->end, &label1, &table1)
&& tablejump_p (src2->end, &label2, &table2)
if (tablejump_p (BB_END (src1), &label1, &table1)
&& tablejump_p (BB_END (src2), &label2, &table2)
&& label1 != label2)
{
replace_label_data rr;
@ -1459,7 +1459,7 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
/* Do not replace the label in SRC1->END because when deleting
a block whose end is a tablejump, the tablejump referenced
from the instruction is deleted too. */
if (insn != src1->end)
if (insn != BB_END (src1))
for_each_rtx (&insn, replace_label, &rr);
}
}
@ -1467,7 +1467,7 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
#endif
/* Avoid splitting if possible. */
if (newpos2 == src2->head)
if (newpos2 == BB_HEAD (src2))
redirect_to = src2;
else
{
@ -1716,9 +1716,9 @@ try_optimize_cfg (int mode)
if (b->pred->pred_next == NULL
&& (b->pred->flags & EDGE_FALLTHRU)
&& !(b->pred->flags & EDGE_COMPLEX)
&& GET_CODE (b->head) == CODE_LABEL
&& GET_CODE (BB_HEAD (b)) == CODE_LABEL
&& (!(mode & CLEANUP_PRE_SIBCALL)
|| !tail_recursion_label_p (b->head))
|| !tail_recursion_label_p (BB_HEAD (b)))
/* If the previous block ends with a branch to this
block, we can't delete the label. Normally this
is a condjump that is yet to be simplified, but
@ -1726,21 +1726,21 @@ try_optimize_cfg (int mode)
some element going to the same place as the
default (fallthru). */
&& (b->pred->src == ENTRY_BLOCK_PTR
|| GET_CODE (b->pred->src->end) != JUMP_INSN
|| ! label_is_jump_target_p (b->head,
b->pred->src->end)))
|| GET_CODE (BB_END (b->pred->src)) != JUMP_INSN
|| ! label_is_jump_target_p (BB_HEAD (b),
BB_END (b->pred->src))))
{
rtx label = b->head;
rtx label = BB_HEAD (b);
delete_insn_chain (label, label);
/* In the case label is undeletable, move it after the
BASIC_BLOCK note. */
if (NOTE_LINE_NUMBER (b->head) == NOTE_INSN_DELETED_LABEL)
if (NOTE_LINE_NUMBER (BB_HEAD (b)) == NOTE_INSN_DELETED_LABEL)
{
rtx bb_note = NEXT_INSN (b->head);
rtx bb_note = NEXT_INSN (BB_HEAD (b));
reorder_insns_nobb (label, label, bb_note);
b->head = bb_note;
BB_HEAD (b) = bb_note;
}
if (rtl_dump_file)
fprintf (rtl_dump_file, "Deleted label in block %i.\n",
@ -1751,7 +1751,7 @@ try_optimize_cfg (int mode)
if (!(mode & CLEANUP_CFGLAYOUT)
&& b->pred->pred_next == NULL
&& (b->pred->flags & EDGE_FALLTHRU)
&& GET_CODE (b->head) != CODE_LABEL
&& GET_CODE (BB_HEAD (b)) != CODE_LABEL
&& FORWARDER_BLOCK_P (b)
/* Note that forwarder_block_p true ensures that
there is a successor for this block. */
@ -1793,10 +1793,10 @@ try_optimize_cfg (int mode)
else if (!(mode & CLEANUP_CFGLAYOUT)
/* If the jump insn has side effects,
we can't kill the edge. */
&& (GET_CODE (b->end) != JUMP_INSN
&& (GET_CODE (BB_END (b)) != JUMP_INSN
|| (flow2_completed
? simplejump_p (b->end)
: onlyjump_p (b->end)))
? simplejump_p (BB_END (b))
: onlyjump_p (BB_END (b))))
&& (next = merge_blocks_move (s, b, c, mode)))
{
b = next;
@ -1818,7 +1818,7 @@ try_optimize_cfg (int mode)
if (b->succ
&& ! b->succ->succ_next
&& b->succ->dest != EXIT_BLOCK_PTR
&& onlyjump_p (b->end)
&& onlyjump_p (BB_END (b))
&& redirect_edge_and_branch (b->succ, b->succ->dest))
{
update_forwarder_flag (b);
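Because BB_HEAD and BB_END expand directly to the underlying fields, callers may still take their address, as the cfgcleanup.c hunks above do for squeeze_notes (&BB_HEAD (a), &BB_END (a)) and for_each_rtx (&BB_END (bb1), ...). A short illustrative fragment mirroring the squeeze_notes call shape in merge_blocks_move_predecessor_nojumps; A is assumed to be a basic_block.

/* The macros are plain field accesses, so & yields a pointer to the
   field stored inside the basic block itself.  */
rtx *headp = &BB_HEAD (a);    /* == &a->head_ */
rtx *endp  = &BB_END (a);     /* == &a->end_  */

if (squeeze_notes (headp, endp))
  abort ();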

gcc/cfglayout.c

@ -89,9 +89,9 @@ skip_insns_after_block (basic_block bb)
next_head = NULL_RTX;
if (bb->next_bb != EXIT_BLOCK_PTR)
next_head = bb->next_bb->head;
next_head = BB_HEAD (bb->next_bb);
for (last_insn = insn = bb->end; (insn = NEXT_INSN (insn)) != 0; )
for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
{
if (insn == next_head)
break;
@ -148,7 +148,7 @@ skip_insns_after_block (basic_block bb)
created by removing the basic block originally following
NOTE_INSN_LOOP_BEG. In such case reorder the notes. */
for (insn = last_insn; insn != bb->end; insn = prev)
for (insn = last_insn; insn != BB_END (bb); insn = prev)
{
prev = PREV_INSN (insn);
if (GET_CODE (insn) == NOTE)
@ -172,7 +172,7 @@ skip_insns_after_block (basic_block bb)
static rtx
label_for_bb (basic_block bb)
{
rtx label = bb->head;
rtx label = BB_HEAD (bb);
if (GET_CODE (label) != CODE_LABEL)
{
@ -214,13 +214,13 @@ record_effective_endpoints (void)
{
rtx end;
if (PREV_INSN (bb->head) && next_insn != bb->head)
if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
bb->rbi->header = unlink_insn_chain (next_insn,
PREV_INSN (bb->head));
PREV_INSN (BB_HEAD (bb)));
end = skip_insns_after_block (bb);
if (NEXT_INSN (bb->end) && bb->end != end)
bb->rbi->footer = unlink_insn_chain (NEXT_INSN (bb->end), end);
next_insn = NEXT_INSN (bb->end);
if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
bb->rbi->footer = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
next_insn = NEXT_INSN (BB_END (bb));
}
cfg_layout_function_footer = next_insn;
@ -598,11 +598,11 @@ fixup_reorder_chain (void)
insn = NEXT_INSN (insn);
}
if (insn)
NEXT_INSN (insn) = bb->head;
NEXT_INSN (insn) = BB_HEAD (bb);
else
set_first_insn (bb->head);
PREV_INSN (bb->head) = insn;
insn = bb->end;
set_first_insn (BB_HEAD (bb));
PREV_INSN (BB_HEAD (bb)) = insn;
insn = BB_END (bb);
if (bb->rbi->footer)
{
NEXT_INSN (insn) = bb->rbi->footer;
@ -649,7 +649,7 @@ fixup_reorder_chain (void)
else if (! (e->flags & EDGE_EH))
e_taken = e;
bb_end_insn = bb->end;
bb_end_insn = BB_END (bb);
if (GET_CODE (bb_end_insn) == JUMP_INSN)
{
if (any_condjump_p (bb_end_insn))
@ -675,9 +675,9 @@ fixup_reorder_chain (void)
e_fake = unchecked_make_edge (bb, e_fall->dest, 0);
if (!redirect_jump (bb->end, block_label (bb), 0))
if (!redirect_jump (BB_END (bb), block_label (bb), 0))
abort ();
note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
if (note)
{
int prob = INTVAL (XEXP (note, 0));
@ -784,7 +784,7 @@ fixup_reorder_chain (void)
if (bb->rbi->original)
fprintf (rtl_dump_file, "duplicate of %i ",
bb->rbi->original->index);
else if (forwarder_block_p (bb) && GET_CODE (bb->head) != CODE_LABEL)
else if (forwarder_block_p (bb) && GET_CODE (BB_HEAD (bb)) != CODE_LABEL)
fprintf (rtl_dump_file, "compensation ");
else
fprintf (rtl_dump_file, "bb %i ", bb->index);
@ -896,18 +896,18 @@ cfg_layout_can_duplicate_bb_p (basic_block bb)
/* Do not attempt to duplicate tablejumps, as we need to unshare
the dispatch table. This is difficult to do, as the instructions
computing jump destination may be hoisted outside the basic block. */
if (tablejump_p (bb->end, NULL, NULL))
if (tablejump_p (BB_END (bb), NULL, NULL))
return false;
/* Do not duplicate blocks containing insns that can't be copied. */
if (targetm.cannot_copy_insn_p)
{
rtx insn = bb->head;
rtx insn = BB_HEAD (bb);
while (1)
{
if (INSN_P (insn) && (*targetm.cannot_copy_insn_p) (insn))
return false;
if (insn == bb->end)
if (insn == BB_END (bb))
break;
insn = NEXT_INSN (insn);
}
@ -1028,7 +1028,7 @@ cfg_layout_duplicate_bb (basic_block bb, edge e)
abort ();
#endif
insn = duplicate_insn_chain (bb->head, bb->end);
insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
new_bb = create_basic_block (insn,
insn ? get_last_insn () : NULL,
EXIT_BLOCK_PTR->prev_bb);

gcc/cfgloop.c

@ -588,7 +588,7 @@ make_forwarder_block (basic_block bb, int redirect_latch, int redirect_nonlatch,
insn = PREV_INSN (first_insn_after_basic_block_note (bb));
/* For empty block split_block will return NULL. */
if (bb->end == insn)
if (BB_END (bb) == insn)
emit_note_after (NOTE_INSN_DELETED, insn);
fallthru = split_block (bb, insn);

gcc/cfgloopanal.c

@ -112,8 +112,8 @@ blocks_invariant_registers (basic_block *bbs, int nbbs, regset regs)
for (i = 0; i < max_reg_num (); i++)
SET_REGNO_REG_SET (regs, i);
for (i = 0; i < nbbs; i++)
for (insn = bbs[i]->head;
insn != NEXT_INSN (bbs[i]->end);
for (insn = BB_HEAD (bbs[i]);
insn != NEXT_INSN (BB_END (bbs[i]));
insn = NEXT_INSN (insn))
if (INSN_P (insn))
note_stores (PATTERN (insn),
@ -157,8 +157,8 @@ blocks_single_set_registers (basic_block *bbs, int nbbs, rtx *regs)
regs[i] = NULL;
for (i = 0; i < nbbs; i++)
for (insn = bbs[i]->head;
insn != NEXT_INSN (bbs[i]->end);
for (insn = BB_HEAD (bbs[i]);
insn != NEXT_INSN (BB_END (bbs[i]));
insn = NEXT_INSN (insn))
{
rtx set = single_set (insn);
@ -171,8 +171,8 @@ blocks_single_set_registers (basic_block *bbs, int nbbs, rtx *regs)
data.regs = regs;
for (i = 0; i < nbbs; i++)
for (insn = bbs[i]->head;
insn != NEXT_INSN (bbs[i]->end);
for (insn = BB_HEAD (bbs[i]);
insn != NEXT_INSN (BB_END (bbs[i]));
insn = NEXT_INSN (insn))
{
if (!INSN_P (insn))
@ -360,12 +360,12 @@ simple_increment (struct loops *loops, struct loop *loop,
if (mod_bb1 == mod_bb)
{
for (;
mod_insn != PREV_INSN (mod_bb->head);
mod_insn != PREV_INSN (BB_HEAD (mod_bb));
mod_insn = PREV_INSN (mod_insn))
if (mod_insn == mod_insn1)
break;
if (mod_insn == PREV_INSN (mod_bb->head))
if (mod_insn == PREV_INSN (BB_HEAD (mod_bb)))
return NULL;
}
@ -416,7 +416,7 @@ variable_initial_value (rtx insn, regset invariant_regs,
bb = BLOCK_FOR_INSN (insn);
while (1)
{
for (; insn != bb->head; insn = PREV_INSN (insn))
for (; insn != BB_HEAD (bb); insn = PREV_INSN (insn))
{
if (INSN_P (insn))
note_stores (PATTERN (insn),
@ -426,7 +426,7 @@ variable_initial_value (rtx insn, regset invariant_regs,
break;
}
if (insn != bb->head)
if (insn != BB_HEAD (bb))
{
/* We found place where var is set. */
rtx set_dest;
@ -471,7 +471,7 @@ variable_initial_value (rtx insn, regset invariant_regs,
return NULL;
bb = bb->pred->src;
insn = bb->end;
insn = BB_END (bb);
}
return NULL;
@ -496,7 +496,7 @@ variable_initial_values (edge e, rtx var, enum machine_mode inner_mode)
if (e->src == ENTRY_BLOCK_PTR)
return list;
set_insn = e->src->end;
set_insn = BB_END (e->src);
while (REG_P (var)
&& (var = variable_initial_value (set_insn, invariant_regs, var,
&set_insn, inner_mode)))
@ -983,7 +983,7 @@ simple_loop_exit_p (struct loops *loops, struct loop *loop, edge exit_edge,
return false;
/* It must end in a simple conditional jump. */
if (!any_condjump_p (exit_bb->end))
if (!any_condjump_p (BB_END (exit_bb)))
return false;
ei = exit_bb->succ;
@ -995,7 +995,7 @@ simple_loop_exit_p (struct loops *loops, struct loop *loop, edge exit_edge,
/* Condition must be a simple comparison in that one of operands
is register and the other one is invariant. */
if (!(condition = get_condition (exit_bb->end, NULL, false)))
if (!(condition = get_condition (BB_END (exit_bb), NULL, false)))
return false;
if (!simple_condition_p (loop, condition, invariant_regs, desc))
@ -1323,7 +1323,7 @@ num_loop_insns (struct loop *loop)
{
bb = bbs[i];
ninsns++;
for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
if (INSN_P (insn))
ninsns++;
}
@ -1347,7 +1347,7 @@ average_num_loop_insns (struct loop *loop)
bb = bbs[i];
binsns = 1;
for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
if (INSN_P (insn))
binsns++;

gcc/cfgloopmanip.c

@ -788,7 +788,7 @@ loop_delete_branch_edge (edge e, int really_delete)
if (src->succ->succ_next->succ_next)
return false;
/* And it must be just a simple branch. */
if (!any_condjump_p (src->end))
if (!any_condjump_p (BB_END (src)))
return false;
snd = e == src->succ ? src->succ->succ_next : src->succ;
@ -1126,7 +1126,7 @@ create_preheader (struct loop *loop, dominance_info dom, int flags)
insn = PREV_INSN (insn);
else
insn = get_last_insn ();
if (insn == loop->header->end)
if (insn == BB_END (loop->header))
{
/* Split_block would not split block after its end. */
emit_note_after (NOTE_INSN_DELETED, insn);
@ -1243,7 +1243,7 @@ loop_split_edge_with (edge e, rtx insns, struct loops *loops)
}
if (insns)
emit_insn_after (insns, new_bb->end);
emit_insn_after (insns, BB_END (new_bb));
set_immediate_dominator (loops->cfg.dom, new_bb, src);
set_immediate_dominator (loops->cfg.dom, dest,

gcc/cfgrtl.c

@ -199,7 +199,7 @@ delete_insn_and_edges (rtx insn)
if (INSN_P (insn)
&& BLOCK_FOR_INSN (insn)
&& BLOCK_FOR_INSN (insn)->end == insn)
&& BB_END (BLOCK_FOR_INSN (insn)) == insn)
purge = true;
x = delete_insn (insn);
if (purge)
@ -240,7 +240,7 @@ delete_insn_chain_and_edges (rtx first, rtx last)
if (INSN_P (last)
&& BLOCK_FOR_INSN (last)
&& BLOCK_FOR_INSN (last)->end == last)
&& BB_END (BLOCK_FOR_INSN (last)) == last)
purge = true;
delete_insn_chain (first, last);
if (purge)
@ -310,8 +310,8 @@ create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
if (NEXT_INSN (end) == bb_note)
end = bb_note;
bb->head = head;
bb->end = end;
BB_HEAD (bb) = head;
BB_END (bb) = end;
bb->index = last_basic_block++;
bb->flags = BB_NEW;
link_block (bb, after);
@ -378,7 +378,7 @@ rtl_delete_block (basic_block b)
/* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
hanging before the block. */
for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
for (insn = PREV_INSN (BB_HEAD (b)); insn; insn = PREV_INSN (insn))
{
if (GET_CODE (insn) != NOTE)
break;
@ -387,15 +387,15 @@ rtl_delete_block (basic_block b)
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
}
insn = b->head;
insn = BB_HEAD (b);
never_reached_warning (insn, b->end);
never_reached_warning (insn, BB_END (b));
if (GET_CODE (insn) == CODE_LABEL)
maybe_remove_eh_handler (insn);
/* Include any jump table following the basic block. */
end = b->end;
end = BB_END (b);
if (tablejump_p (end, NULL, &tmp))
end = tmp;
@ -405,7 +405,7 @@ rtl_delete_block (basic_block b)
end = tmp;
/* Selectively delete the entire chain. */
b->head = NULL;
BB_HEAD (b) = NULL;
delete_insn_chain (insn, end);
/* Remove the edges into and out of this block. Note that there may
@ -431,10 +431,10 @@ compute_bb_for_insn (void)
FOR_EACH_BB (bb)
{
rtx end = bb->end;
rtx end = BB_END (bb);
rtx insn;
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
BLOCK_FOR_INSN (insn) = bb;
if (insn == end)
@ -461,11 +461,11 @@ update_bb_for_insn (basic_block bb)
{
rtx insn;
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) != BARRIER)
set_block_for_insn (insn, bb);
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
}
@ -484,15 +484,15 @@ rtl_split_block (basic_block bb, void *insnp)
rtx insn = insnp;
/* There is no point splitting the block after its end. */
if (bb->end == insn)
if (BB_END (bb) == insn)
return 0;
/* Create the new basic block. */
new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
new_bb->count = bb->count;
new_bb->frequency = bb->frequency;
new_bb->loop_depth = bb->loop_depth;
bb->end = insn;
BB_END (bb) = insn;
/* Redirect the outgoing edges. */
new_bb->succ = bb->succ;
@ -564,7 +564,7 @@ update_cfg_after_block_merging (basic_block a, basic_block b)
static void
rtl_merge_blocks (basic_block a, basic_block b)
{
rtx b_head = b->head, b_end = b->end, a_end = a->end;
rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
rtx del_first = NULL_RTX, del_last = NULL_RTX;
int b_empty = 0;
@ -601,7 +601,7 @@ rtl_merge_blocks (basic_block a, basic_block b)
for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
if (GET_CODE (prev) != NOTE
|| NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
|| prev == a->head)
|| prev == BB_HEAD (a))
break;
del_first = a_end;
@ -615,7 +615,7 @@ rtl_merge_blocks (basic_block a, basic_block b)
prev = prev_nonnote_insn (prev);
if (!prev)
prev = a->head;
prev = BB_HEAD (a);
del_first = tmp;
}
#endif
@ -644,7 +644,7 @@ rtl_merge_blocks (basic_block a, basic_block b)
a_end = b_end;
}
a->end = a_end;
BB_END (a) = a_end;
}
/* Return true when block A and B can be merged. */
@ -660,9 +660,9 @@ rtl_can_merge_blocks (basic_block a,basic_block b)
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
we can't kill the edge. */
&& (GET_CODE (a->end) != JUMP_INSN
&& (GET_CODE (BB_END (a)) != JUMP_INSN
|| (flow2_completed
? simplejump_p (a->end) : onlyjump_p (a->end))));
? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
/* Return the label in the head of basic block BLOCK. Create one if it doesn't
@ -674,12 +674,12 @@ block_label (basic_block block)
if (block == EXIT_BLOCK_PTR)
return NULL_RTX;
if (GET_CODE (block->head) != CODE_LABEL)
if (GET_CODE (BB_HEAD (block)) != CODE_LABEL)
{
block->head = emit_label_before (gen_label_rtx (), block->head);
BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
}
return block->head;
return BB_HEAD (block);
}
/* Attempt to perform edge redirection by replacing possibly complex jump
@ -691,7 +691,7 @@ static bool
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
basic_block src = e->src;
rtx insn = src->end, kill_from;
rtx insn = BB_END (src), kill_from;
edge tmp;
rtx set;
int fallthru = 0;
@ -731,7 +731,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
{
rtx insn = src->rbi->footer;
delete_insn_chain (kill_from, src->end);
delete_insn_chain (kill_from, BB_END (src));
/* Remove barriers but keep jumptables. */
while (insn)
@ -751,7 +751,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
}
}
else
delete_insn_chain (kill_from, PREV_INSN (target->head));
delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
}
/* If this already is simplejump, redirect it. */
@ -781,11 +781,11 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
rtx barrier, label, table;
emit_jump_insn_after (gen_jump (target_label), insn);
JUMP_LABEL (src->end) = target_label;
JUMP_LABEL (BB_END (src)) = target_label;
LABEL_NUSES (target_label)++;
if (rtl_dump_file)
fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
INSN_UID (insn), INSN_UID (src->end));
INSN_UID (insn), INSN_UID (BB_END (src)));
delete_insn_chain (kill_from, insn);
@ -796,20 +796,20 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
if (tablejump_p (insn, &label, &table))
delete_insn_chain (label, table);
barrier = next_nonnote_insn (src->end);
barrier = next_nonnote_insn (BB_END (src));
if (!barrier || GET_CODE (barrier) != BARRIER)
emit_barrier_after (src->end);
emit_barrier_after (BB_END (src));
else
{
if (barrier != NEXT_INSN (src->end))
if (barrier != NEXT_INSN (BB_END (src)))
{
/* Move the jump before barrier so that the notes
which originally were or were created before jump table are
inside the basic block. */
rtx new_insn = src->end;
rtx new_insn = BB_END (src);
rtx tmp;
for (tmp = NEXT_INSN (src->end); tmp != barrier;
for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
tmp = NEXT_INSN (tmp))
set_block_for_insn (tmp, src);
@ -839,9 +839,9 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
/* We don't want a block to end on a line-number note since that has
the potential of changing the code between -g and not -g. */
while (GET_CODE (e->src->end) == NOTE
&& NOTE_LINE_NUMBER (e->src->end) >= 0)
delete_insn (e->src->end);
while (GET_CODE (BB_END (e->src)) == NOTE
&& NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
delete_insn (BB_END (e->src));
if (e->dest != target)
redirect_edge_succ (e, target);
@ -876,9 +876,9 @@ static bool
redirect_branch_edge (edge e, basic_block target)
{
rtx tmp;
rtx old_label = e->dest->head;
rtx old_label = BB_HEAD (e->dest);
basic_block src = e->src;
rtx insn = src->end;
rtx insn = BB_END (src);
/* We can only redirect non-fallthru edges of jump insn. */
if (e->flags & EDGE_FALLTHRU)
@ -1000,18 +1000,18 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
instruction, first redirect the jump itself and then continue
by creating a basic block afterwards to redirect fallthru edge. */
if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
&& any_condjump_p (e->src->end)
&& any_condjump_p (BB_END (e->src))
/* When called from cfglayout, fallthru edges do not
necessarily go to the next block. */
&& e->src->next_bb == e->dest
&& JUMP_LABEL (e->src->end) == e->dest->head)
&& JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
{
rtx note;
edge b = unchecked_make_edge (e->src, target, 0);
if (!redirect_jump (e->src->end, block_label (target), 0))
if (!redirect_jump (BB_END (e->src), block_label (target), 0))
abort ();
note = find_reg_note (e->src->end, REG_BR_PROB, NULL_RTX);
note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
if (note)
{
int prob = INTVAL (XEXP (note, 0));
@ -1046,7 +1046,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
/* We can't redirect the entry block. Create an empty block at the
start of the function which we use to add the new jump. */
edge *pe1;
basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
/* Change the existing edge's source to be the new block, and add
a new edge from the entry block to the new block. */
@ -1069,8 +1069,8 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
/* If the old block ended with a tablejump, skip its table
by searching forward from there. Otherwise start searching
forward from the last instruction of the old block. */
if (!tablejump_p (e->src->end, NULL, &note))
note = e->src->end;
if (!tablejump_p (BB_END (e->src), NULL, &note))
note = BB_END (e->src);
/* Position the new block correctly relative to loop notes. */
note = last_loop_beg_note (note);
@ -1111,19 +1111,19 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
if (target == EXIT_BLOCK_PTR)
{
if (HAVE_return)
emit_jump_insn_after (gen_return (), jump_block->end);
emit_jump_insn_after (gen_return (), BB_END (jump_block));
else
abort ();
}
else
{
rtx label = block_label (target);
emit_jump_insn_after (gen_jump (label), jump_block->end);
JUMP_LABEL (jump_block->end) = label;
emit_jump_insn_after (gen_jump (label), BB_END (jump_block));
JUMP_LABEL (BB_END (jump_block)) = label;
LABEL_NUSES (label)++;
}
emit_barrier_after (jump_block->end);
emit_barrier_after (BB_END (jump_block));
redirect_edge_succ_nodup (e, target);
if (abnormal_edge_flags)
@ -1176,14 +1176,14 @@ tidy_fallthru_edge (edge e, basic_block b, basic_block c)
So search through a sequence of barriers, labels, and notes for
the head of block C and assert that we really do fall through. */
for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
if (INSN_P (q))
return;
/* Remove what will soon cease being the jump insn from the source block.
If block B consisted only of this single jump, turn it into a deleted
note. */
q = b->end;
q = BB_END (b);
if (GET_CODE (q) == JUMP_INSN
&& onlyjump_p (q)
&& (any_uncondjump_p (q)
@ -1205,8 +1205,8 @@ tidy_fallthru_edge (edge e, basic_block b, basic_block c)
}
/* Selectively unlink the sequence. */
if (q != PREV_INSN (c->head))
delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
if (q != PREV_INSN (BB_HEAD (c)))
delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));
e->flags |= EDGE_FALLTHRU;
}
@ -1247,8 +1247,8 @@ tidy_fallthru_edges (void)
&& s->succ_next == NULL
&& s->dest == c
/* If the jump insn has side effects, we can't tidy the edge. */
&& (GET_CODE (b->end) != JUMP_INSN
|| onlyjump_p (b->end)))
&& (GET_CODE (BB_END (b)) != JUMP_INSN
|| onlyjump_p (BB_END (b))))
tidy_fallthru_edge (s, b, c);
}
}
@ -1274,7 +1274,7 @@ back_edge_of_syntactic_loop_p (basic_block bb1, basic_block bb2)
if (!bb)
return false;
for (insn = bb1->end; insn != bb2->head && count >= 0;
for (insn = BB_END (bb1); insn != BB_HEAD (bb2) && count >= 0;
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE)
{
@ -1337,14 +1337,14 @@ rtl_split_edge (edge edge_in)
we confuse the loop optimizer into thinking the loop is a phony. */
if (edge_in->dest != EXIT_BLOCK_PTR
&& PREV_INSN (edge_in->dest->head)
&& GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
&& (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
&& PREV_INSN (BB_HEAD (edge_in->dest))
&& GET_CODE (PREV_INSN (BB_HEAD (edge_in->dest))) == NOTE
&& (NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (edge_in->dest)))
== NOTE_INSN_LOOP_BEG)
&& !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
before = PREV_INSN (edge_in->dest->head);
before = PREV_INSN (BB_HEAD (edge_in->dest));
else if (edge_in->dest != EXIT_BLOCK_PTR)
before = edge_in->dest->head;
before = BB_HEAD (edge_in->dest);
else
before = NULL_RTX;
@ -1512,11 +1512,11 @@ commit_one_edge_insertion (edge e, int watch_calls)
its return value. */
if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
&& e->src != ENTRY_BLOCK_PTR
&& GET_CODE (e->src->end) == CALL_INSN)
&& GET_CODE (BB_END (e->src)) == CALL_INSN)
{
rtx next = next_nonnote_insn (e->src->end);
rtx next = next_nonnote_insn (BB_END (e->src));
after = e->dest->head;
after = BB_HEAD (e->dest);
/* The first insn after the call may be a stack pop, skip it. */
while (next
&& keep_with_call_p (next))
@ -1536,12 +1536,12 @@ commit_one_edge_insertion (edge e, int watch_calls)
/* Get the location correct wrt a code label, and "nice" wrt
a basic block note, and before everything else. */
tmp = bb->head;
tmp = BB_HEAD (bb);
if (GET_CODE (tmp) == CODE_LABEL)
tmp = NEXT_INSN (tmp);
if (NOTE_INSN_BASIC_BLOCK_P (tmp))
tmp = NEXT_INSN (tmp);
if (tmp == bb->head)
if (tmp == BB_HEAD (bb))
before = tmp;
else if (tmp)
after = PREV_INSN (tmp);
@ -1563,8 +1563,8 @@ commit_one_edge_insertion (edge e, int watch_calls)
We know this block has a single successor, so we can just emit
the queued insns before the jump. */
if (GET_CODE (bb->end) == JUMP_INSN)
for (before = bb->end;
if (GET_CODE (BB_END (bb)) == JUMP_INSN)
for (before = BB_END (bb);
GET_CODE (PREV_INSN (before)) == NOTE
&& NOTE_LINE_NUMBER (PREV_INSN (before)) ==
NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
@ -1575,14 +1575,14 @@ commit_one_edge_insertion (edge e, int watch_calls)
if ((e->flags & EDGE_FALLTHRU) == 0)
abort ();
after = bb->end;
after = BB_END (bb);
}
}
/* Otherwise we must split the edge. */
else
{
bb = split_edge (e);
after = bb->end;
after = BB_END (bb);
}
}
@ -1728,7 +1728,7 @@ rtl_dump_bb (basic_block bb, FILE *outf)
dump_regset (bb->global_live_at_start, outf);
putc ('\n', outf);
for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
insn = NEXT_INSN (insn))
print_rtl_single (outf, insn);
@ -1761,9 +1761,9 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
rtx x;
start[INSN_UID (bb->head)] = bb;
end[INSN_UID (bb->end)] = bb;
for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
start[INSN_UID (BB_HEAD (bb))] = bb;
end[INSN_UID (BB_END (bb))] = bb;
for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
{
enum bb_state state = IN_MULTIPLE_BB;
@ -1771,7 +1771,7 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
state = IN_ONE_BB;
in_bb_p[INSN_UID (x)] = state;
if (x == bb->end)
if (x == BB_END (bb))
break;
}
}
@ -1827,9 +1827,9 @@ void
update_br_prob_note (basic_block bb)
{
rtx note;
if (GET_CODE (bb->end) != JUMP_INSN)
if (GET_CODE (BB_END (bb)) != JUMP_INSN)
return;
note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
return;
XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
@ -1866,8 +1866,8 @@ rtl_verify_flow_info_1 (void)
FOR_EACH_BB_REVERSE (bb)
{
rtx head = bb->head;
rtx end = bb->end;
rtx head = BB_HEAD (bb);
rtx end = BB_END (bb);
/* Verify the end of the basic block is in the INSN chain. */
for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
@ -1917,10 +1917,10 @@ rtl_verify_flow_info_1 (void)
edge e, fallthru = NULL;
rtx note;
if (INSN_P (bb->end)
&& (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
if (INSN_P (BB_END (bb))
&& (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
&& bb->succ && bb->succ->succ_next
&& any_condjump_p (bb->end))
&& any_condjump_p (BB_END (bb)))
{
if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
{
@ -1949,52 +1949,52 @@ rtl_verify_flow_info_1 (void)
n_abnormal++;
}
if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
&& !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
&& !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
{
error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
err = 1;
}
if (n_branch
&& (GET_CODE (bb->end) != JUMP_INSN
|| (n_branch > 1 && (any_uncondjump_p (bb->end)
|| any_condjump_p (bb->end)))))
&& (GET_CODE (BB_END (bb)) != JUMP_INSN
|| (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
|| any_condjump_p (BB_END (bb))))))
{
error ("Too many outgoing branch edges from bb %i", bb->index);
err = 1;
}
if (n_fallthru && any_uncondjump_p (bb->end))
if (n_fallthru && any_uncondjump_p (BB_END (bb)))
{
error ("Fallthru edge after unconditional jump %i", bb->index);
err = 1;
}
if (n_branch != 1 && any_uncondjump_p (bb->end))
if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
{
error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
err = 1;
}
if (n_branch != 1 && any_condjump_p (bb->end)
&& JUMP_LABEL (bb->end) != fallthru->dest->head)
if (n_branch != 1 && any_condjump_p (BB_END (bb))
&& JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
{
error ("Wrong amount of branch edges after conditional jump %i", bb->index);
err = 1;
}
if (n_call && GET_CODE (bb->end) != CALL_INSN)
if (n_call && GET_CODE (BB_END (bb)) != CALL_INSN)
{
error ("Call edges for non-call insn in bb %i", bb->index);
err = 1;
}
if (n_abnormal
&& (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
&& (GET_CODE (bb->end) != JUMP_INSN
|| any_condjump_p (bb->end)
|| any_uncondjump_p (bb->end)))
&& (GET_CODE (BB_END (bb)) != CALL_INSN && n_call != n_abnormal)
&& (GET_CODE (BB_END (bb)) != JUMP_INSN
|| any_condjump_p (BB_END (bb))
|| any_uncondjump_p (BB_END (bb))))
{
error ("Abnormal edges for no purpose in bb %i", bb->index);
err = 1;
}
for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
if (BLOCK_FOR_INSN (x) != bb)
{
debug_rtx (x);
@ -2013,10 +2013,10 @@ rtl_verify_flow_info_1 (void)
/* OK pointers are correct. Now check the header of basic
block. It ought to contain optional CODE_LABEL followed
by NOTE_BASIC_BLOCK. */
x = bb->head;
x = BB_HEAD (bb);
if (GET_CODE (x) == CODE_LABEL)
{
if (bb->end == x)
if (BB_END (bb) == x)
{
error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
bb->index);
@ -2033,7 +2033,7 @@ rtl_verify_flow_info_1 (void)
err = 1;
}
if (bb->end == x)
if (BB_END (bb) == x)
/* Do checks for empty blocks here.  */
;
else
@ -2046,7 +2046,7 @@ rtl_verify_flow_info_1 (void)
err = 1;
}
if (x == bb->end)
if (x == BB_END (bb))
break;
if (control_flow_insn_p (x))
@ -2092,7 +2092,7 @@ rtl_verify_flow_info (void)
rtx insn;
/* Ensure existence of barrier in BB with no fallthru edges. */
for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
for (insn = BB_END (bb); !insn || GET_CODE (insn) != BARRIER;
insn = NEXT_INSN (insn))
if (!insn
|| (GET_CODE (insn) == NOTE
@ -2116,7 +2116,7 @@ rtl_verify_flow_info (void)
err = 1;
}
else
for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == BARRIER
#ifndef CASE_DROPS_THROUGH
@ -2179,7 +2179,7 @@ rtl_verify_flow_info (void)
&& returnjump_p (x) && ! condjump_p (x)
&& ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
fatal_insn ("return not followed by barrier", x);
if (curr_bb && x == curr_bb->end)
if (curr_bb && x == BB_END (curr_bb))
curr_bb = NULL;
}
@ -2199,7 +2199,7 @@ bool
purge_dead_edges (basic_block bb)
{
edge e, next;
rtx insn = bb->end, note;
rtx insn = BB_END (bb), note;
bool purged = false;
/* If this instruction cannot trap, remove REG_EH_REGION notes. */
@ -2220,12 +2220,12 @@ purge_dead_edges (basic_block bb)
next = e->succ_next;
if (e->flags & EDGE_EH)
{
if (can_throw_internal (bb->end))
if (can_throw_internal (BB_END (bb)))
continue;
}
else if (e->flags & EDGE_ABNORMAL_CALL)
{
if (GET_CODE (bb->end) == CALL_INSN
if (GET_CODE (BB_END (bb)) == CALL_INSN
&& (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
|| INTVAL (XEXP (note, 0)) >= 0))
continue;
@ -2275,7 +2275,7 @@ purge_dead_edges (basic_block bb)
block, so we should keep the edge. */
continue;
else if (e->dest != EXIT_BLOCK_PTR
&& e->dest->head == JUMP_LABEL (insn))
&& BB_HEAD (e->dest) == JUMP_LABEL (insn))
/* If the destination block is the target of the jump,
keep the edge. */
continue;
@ -2462,10 +2462,10 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
if (e->flags & EDGE_FALLTHRU)
{
/* Redirect any branch edges unified with the fallthru one. */
if (GET_CODE (src->end) == JUMP_INSN
&& JUMP_LABEL (src->end) == e->dest->head)
if (GET_CODE (BB_END (src)) == JUMP_INSN
&& JUMP_LABEL (BB_END (src)) == BB_HEAD (e->dest))
{
if (!redirect_jump (src->end, block_label (dest), 0))
if (!redirect_jump (BB_END (src), block_label (dest), 0))
abort ();
}
/* In case we are redirecting fallthru edge to the branch edge
@ -2475,9 +2475,9 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
edge s = e->succ_next ? e->succ_next : src->succ;
if (s->dest == dest
&& any_condjump_p (src->end)
&& onlyjump_p (src->end))
delete_insn (src->end);
&& any_condjump_p (BB_END (src))
&& onlyjump_p (BB_END (src)))
delete_insn (BB_END (src));
}
redirect_edge_succ_nodup (e, dest);
if (rtl_dump_file)
@ -2490,7 +2490,7 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
ret = redirect_branch_edge (e, dest);
/* We don't want simplejumps in the insn stream during cfglayout. */
if (simplejump_p (src->end))
if (simplejump_p (BB_END (src)))
abort ();
return ret;
@ -2509,11 +2509,11 @@ cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
static void
cfg_layout_delete_block (basic_block bb)
{
rtx insn, next, prev = PREV_INSN (bb->head), *to, remaints;
rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
if (bb->rbi->header)
{
next = bb->head;
next = BB_HEAD (bb);
if (prev)
NEXT_INSN (prev) = bb->rbi->header;
else
@ -2525,7 +2525,7 @@ cfg_layout_delete_block (basic_block bb)
NEXT_INSN (insn) = next;
PREV_INSN (next) = insn;
}
next = NEXT_INSN (bb->end);
next = NEXT_INSN (BB_END (bb));
if (bb->rbi->footer)
{
insn = bb->rbi->footer;
@ -2546,7 +2546,7 @@ cfg_layout_delete_block (basic_block bb)
}
if (bb->rbi->footer)
{
insn = bb->end;
insn = BB_END (bb);
NEXT_INSN (insn) = bb->rbi->footer;
PREV_INSN (bb->rbi->footer) = insn;
while (NEXT_INSN (insn))
@ -2598,9 +2598,9 @@ cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
we can't kill the edge. */
&& (GET_CODE (a->end) != JUMP_INSN
&& (GET_CODE (BB_END (a)) != JUMP_INSN
|| (flow2_completed
? simplejump_p (a->end) : onlyjump_p (a->end))));
? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
/* Merge block A and B, abort when it is not possible. */
@ -2613,38 +2613,38 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
#endif
/* If there was a CODE_LABEL beginning B, delete it. */
if (GET_CODE (b->head) == CODE_LABEL)
delete_insn (b->head);
if (GET_CODE (BB_HEAD (b)) == CODE_LABEL)
delete_insn (BB_HEAD (b));
/* We should have fallthru edge in a, or we can do dummy redirection to get
it cleaned up. */
if (GET_CODE (a->end) == JUMP_INSN)
if (GET_CODE (BB_END (a)) == JUMP_INSN)
redirect_edge_and_branch (a->succ, b);
if (GET_CODE (a->end) == JUMP_INSN)
if (GET_CODE (BB_END (a)) == JUMP_INSN)
abort ();
/* Possible line number notes should appear in between. */
if (b->rbi->header)
{
rtx first = a->end, last;
rtx first = BB_END (a), last;
last = emit_insn_after (b->rbi->header, a->end);
last = emit_insn_after (b->rbi->header, BB_END (a));
delete_insn_chain (NEXT_INSN (first), last);
b->rbi->header = NULL;
}
/* In the case basic blocks are not adjacent, move them around. */
if (NEXT_INSN (a->end) != b->head)
if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
{
rtx first = unlink_insn_chain (b->head, b->end);
rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
emit_insn_after (first, a->end);
emit_insn_after (first, BB_END (a));
/* Skip possible DELETED_LABEL insn. */
if (!NOTE_INSN_BASIC_BLOCK_P (first))
first = NEXT_INSN (first);
if (!NOTE_INSN_BASIC_BLOCK_P (first))
abort ();
b->head = NULL;
BB_HEAD (b) = NULL;
delete_insn (first);
}
/* Otherwise just re-associate the instructions. */
@ -2652,16 +2652,18 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
{
rtx insn;
for (insn = b->head; insn != NEXT_INSN (b->end); insn = NEXT_INSN (insn))
for (insn = BB_HEAD (b);
insn != NEXT_INSN (BB_END (b));
insn = NEXT_INSN (insn))
set_block_for_insn (insn, a);
insn = b->head;
insn = BB_HEAD (b);
/* Skip possible DELETED_LABEL insn. */
if (!NOTE_INSN_BASIC_BLOCK_P (insn))
insn = NEXT_INSN (insn);
if (!NOTE_INSN_BASIC_BLOCK_P (insn))
abort ();
b->head = NULL;
a->end = b->end;
BB_HEAD (b) = NULL;
BB_END (a) = BB_END (b);
delete_insn (insn);
}
@ -2696,7 +2698,7 @@ cfg_layout_split_edge (edge e)
edge new_e;
basic_block new_bb =
create_basic_block (e->src != ENTRY_BLOCK_PTR
? NEXT_INSN (e->src->end) : get_insns (),
? NEXT_INSN (BB_END (e->src)) : get_insns (),
NULL_RTX, e->src);
new_bb->count = e->count;

View File

@ -608,8 +608,8 @@ combine_instructions (rtx f, unsigned int nregs)
FOR_EACH_BB (this_basic_block)
{
for (insn = this_basic_block->head;
insn != NEXT_INSN (this_basic_block->end);
for (insn = BB_HEAD (this_basic_block);
insn != NEXT_INSN (BB_END (this_basic_block));
insn = next ? next : NEXT_INSN (insn))
{
next = 0;
@ -2337,7 +2337,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
for (insn = NEXT_INSN (i3);
insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
|| insn != this_basic_block->next_bb->head);
|| insn != BB_HEAD (this_basic_block->next_bb));
insn = NEXT_INSN (insn))
{
if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
@ -2546,7 +2546,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
for (temp = NEXT_INSN (i2);
temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
|| this_basic_block->head != temp);
|| BB_HEAD (this_basic_block) != temp);
temp = NEXT_INSN (temp))
if (temp != i3 && INSN_P (temp))
for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
@ -11978,7 +11978,7 @@ reg_dead_at_p (rtx reg, rtx insn)
else
{
FOR_EACH_BB (block)
if (insn == block->head)
if (insn == BB_HEAD (block))
break;
if (block == EXIT_BLOCK_PTR)
@ -12617,7 +12617,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
{
if (! INSN_P (tem))
{
if (tem == bb->head)
if (tem == BB_HEAD (bb))
break;
continue;
}
@ -12744,7 +12744,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
break;
}
if (tem == bb->head)
if (tem == BB_HEAD (bb))
break;
}
@ -12841,7 +12841,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
{
if (! INSN_P (tem))
{
if (tem == bb->head)
if (tem == BB_HEAD (bb))
{
SET_BIT (refresh_blocks,
this_basic_block->index);
@ -12948,7 +12948,7 @@ distribute_links (rtx links)
for (insn = NEXT_INSN (XEXP (link, 0));
(insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
|| this_basic_block->next_bb->head != insn));
|| BB_HEAD (this_basic_block->next_bb) != insn));
insn = NEXT_INSN (insn))
if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
{

View File

@ -6402,16 +6402,16 @@ frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
/* Scan all of the blocks for registers that must not be allocated. */
for (j = 0; j < num_bb; j++)
{
rtx last_insn = bb[j]->end;
rtx insn = bb[j]->head;
rtx last_insn = BB_END (bb[j]);
rtx insn = BB_HEAD (bb[j]);
int regno;
if (rtl_dump_file)
fprintf (rtl_dump_file, "Scanning %s block %d, start %d, end %d\n",
(bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
(int) bb[j]->index,
(int) INSN_UID (bb[j]->head),
(int) INSN_UID (bb[j]->end));
(int) INSN_UID (BB_HEAD (bb[j])),
(int) INSN_UID (BB_END (bb[j])));
/* Anything live at the beginning of the block is obviously unavailable
for allocation. */
@ -6611,7 +6611,7 @@ frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
gen_rtx_fmt_ee (code, CC_CCRmode, cc, const0_rtx));
/* Record the check insn to be inserted later. */
frv_ifcvt_add_insn (check_insn, test_bb->end, TRUE);
frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
/* Update the tests. */
frv_ifcvt.cr_reg = cr;
@ -6729,7 +6729,7 @@ frv_ifcvt_modify_multiple_tests (ce_if_block_t *ce_info,
/* First add the andcr/andncr/orcr/orncr, which will be added after the
conditional check instruction, due to frv_ifcvt_add_insn being a LIFO
stack. */
frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), bb->end, TRUE);
frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
/* Now add the conditional check insn. */
cc = XEXP (test_expr, 0);
@ -6740,7 +6740,7 @@ frv_ifcvt_modify_multiple_tests (ce_if_block_t *ce_info,
/* add the new check insn to the list of check insns that need to be
inserted. */
frv_ifcvt_add_insn (check_insn, bb->end, TRUE);
frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
if (TARGET_DEBUG_COND_EXEC)
{

View File

@ -15673,7 +15673,7 @@ ix86_reorg (void)
for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
{
basic_block bb = e->src;
rtx ret = bb->end;
rtx ret = BB_END (bb);
rtx prev;
bool replace = false;

View File

@ -7140,7 +7140,7 @@ emit_predicate_relation_info (void)
FOR_EACH_BB_REVERSE (bb)
{
int r;
rtx head = bb->head;
rtx head = BB_HEAD (bb);
/* We only need such notes at code labels. */
if (GET_CODE (head) != CODE_LABEL)
@ -7154,8 +7154,8 @@ emit_predicate_relation_info (void)
{
rtx p = gen_rtx_REG (BImode, r);
rtx n = emit_insn_after (gen_pred_rel_mutex (p), head);
if (head == bb->end)
bb->end = n;
if (head == BB_END (bb))
BB_END (bb) = n;
head = n;
}
}
@ -7166,7 +7166,7 @@ emit_predicate_relation_info (void)
the call. */
FOR_EACH_BB_REVERSE (bb)
{
rtx insn = bb->head;
rtx insn = BB_HEAD (bb);
while (1)
{
@ -7176,13 +7176,13 @@ emit_predicate_relation_info (void)
{
rtx b = emit_insn_before (gen_safe_across_calls_all (), insn);
rtx a = emit_insn_after (gen_safe_across_calls_normal (), insn);
if (bb->head == insn)
bb->head = b;
if (bb->end == insn)
bb->end = a;
if (BB_HEAD (bb) == insn)
BB_HEAD (bb) = b;
if (BB_END (bb) == insn)
BB_END (bb) = a;
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
insn = NEXT_INSN (insn);
}

View File

@ -435,9 +435,9 @@ conflict_graph_compute (regset regs, partition p)
AND_REG_SET (live, regs);
/* Walk the instruction stream backwards. */
head = bb->head;
insn = bb->end;
for (insn = bb->end; insn != head; insn = PREV_INSN (insn))
head = BB_HEAD (bb);
insn = BB_END (bb);
for (insn = BB_END (bb); insn != head; insn = PREV_INSN (insn))
{
int born_reg;
int live_reg;

View File

@ -1215,14 +1215,14 @@ df_bb_refs_record (struct df *df, basic_block bb)
rtx insn;
/* Scan the block an insn at a time from beginning to end. */
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
{
/* Record defs within INSN. */
df_insn_refs_record (df, bb, insn);
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
}
@ -1255,7 +1255,7 @@ df_bb_reg_def_chain_create (struct df *df, basic_block bb)
scan the basic blocks in reverse order so that the first defs
appear at the start of the chain. */
for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
{
struct df_link *link;
@ -1307,7 +1307,7 @@ df_bb_reg_use_chain_create (struct df *df, basic_block bb)
/* Scan in forward order so that the last uses appear at the start
of the chain. */
for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb); insn && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
struct df_link *link;
@ -1360,7 +1360,7 @@ df_bb_du_chain_create (struct df *df, basic_block bb, bitmap ru)
/* For each def in BB create a linked list (chain) of uses
reached from the def. */
for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
{
struct df_link *def_link;
@ -1437,7 +1437,7 @@ df_bb_ud_chain_create (struct df *df, basic_block bb)
/* For each use in BB create a linked list (chain) of defs
that reach the use. */
for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb); insn && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
unsigned int uid = INSN_UID (insn);
@ -1547,7 +1547,7 @@ df_bb_rd_local_compute (struct df *df, basic_block bb)
struct bb_info *bb_info = DF_BB_INFO (df, bb);
rtx insn;
for (insn = bb->head; insn && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb); insn && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
unsigned int uid = INSN_UID (insn);
@ -1611,7 +1611,7 @@ df_bb_ru_local_compute (struct df *df, basic_block bb)
rtx insn;
for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
{
unsigned int uid = INSN_UID (insn);
@ -1674,7 +1674,7 @@ df_bb_lr_local_compute (struct df *df, basic_block bb)
struct bb_info *bb_info = DF_BB_INFO (df, bb);
rtx insn;
for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
{
unsigned int uid = INSN_UID (insn);
@ -1729,7 +1729,7 @@ df_bb_reg_info_compute (struct df *df, basic_block bb, bitmap live)
bitmap_copy (live, bb_info->lr_out);
for (insn = bb->end; insn && insn != PREV_INSN (bb->head);
for (insn = BB_END (bb); insn && insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
{
unsigned int uid = INSN_UID (insn);
@ -1795,13 +1795,13 @@ df_bb_luids_set (struct df *df, basic_block bb)
/* The LUIDs are monotonically increasing for each basic block. */
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
DF_INSN_LUID (df, insn) = luid++;
DF_INSN_LUID (df, insn) = luid;
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
return luid;
@ -2096,7 +2096,7 @@ df_bb_refs_update (struct df *df, basic_block bb)
a bitmap for insns_modified saves memory and avoids queuing
duplicates. */
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
unsigned int uid;
@ -2112,7 +2112,7 @@ df_bb_refs_update (struct df *df, basic_block bb)
count++;
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
return count;
@ -2248,14 +2248,14 @@ df_bb_refs_unlink (struct df *df, basic_block bb)
rtx insn;
/* Scan the block an insn at a time from beginning to end. */
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
{
/* Unlink refs for INSN. */
df_insn_refs_unlink (df, bb, insn);
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
}
@ -2294,7 +2294,7 @@ df_insn_delete (struct df *df, basic_block bb ATTRIBUTE_UNUSED, rtx insn)
handle the JUMP_LABEL? */
/* We should not be deleting the NOTE_INSN_BASIC_BLOCK or label. */
if (insn == bb->head)
if (insn == BB_HEAD (bb))
abort ();
/* Delete the insn. */
@ -2591,7 +2591,7 @@ df_pattern_emit_before (struct df *df, rtx pattern, basic_block bb, rtx insn)
rtx prev_insn = PREV_INSN (insn);
/* We should not be inserting before the start of the block. */
if (insn == bb->head)
if (insn == BB_HEAD (bb))
abort ();
ret_insn = emit_insn_before (pattern, insn);
if (ret_insn == insn)

View File

@ -3803,12 +3803,12 @@ add_insn_after (rtx insn, rtx after)
bb->flags |= BB_DIRTY;
/* Should not happen as first in the BB is always
either NOTE or LABEL. */
if (bb->end == after
if (BB_END (bb) == after
/* Avoid clobbering of structure when creating new BB. */
&& GET_CODE (insn) != BARRIER
&& (GET_CODE (insn) != NOTE
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
bb->end = insn;
BB_END (bb) = insn;
}
NEXT_INSN (after) = insn;
@ -3871,7 +3871,7 @@ add_insn_before (rtx insn, rtx before)
bb->flags |= BB_DIRTY;
/* Should not happen as first in the BB is always
either NOTE or LABEL. */
if (bb->head == insn
if (BB_HEAD (bb) == insn
/* Avoid clobbering of structure when creating new BB. */
&& GET_CODE (insn) != BARRIER
&& (GET_CODE (insn) != NOTE
@ -3946,16 +3946,16 @@ remove_insn (rtx insn)
{
if (INSN_P (insn))
bb->flags |= BB_DIRTY;
if (bb->head == insn)
if (BB_HEAD (bb) == insn)
{
/* Never ever delete the basic block note without deleting whole
basic block. */
if (GET_CODE (insn) == NOTE)
abort ();
bb->head = next;
BB_HEAD (bb) = next;
}
if (bb->end == insn)
bb->end = prev;
if (BB_END (bb) == insn)
BB_END (bb) = prev;
}
}
@ -4048,13 +4048,13 @@ reorder_insns (rtx from, rtx to, rtx after)
if (GET_CODE (from) != BARRIER
&& (bb2 = BLOCK_FOR_INSN (from)))
{
if (bb2->end == to)
bb2->end = prev;
if (BB_END (bb2) == to)
BB_END (bb2) = prev;
bb2->flags |= BB_DIRTY;
}
if (bb->end == after)
bb->end = to;
if (BB_END (bb) == after)
BB_END (bb) = to;
for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
set_block_for_insn (x, bb);
@ -4443,8 +4443,8 @@ emit_insn_after_1 (rtx first, rtx after)
set_block_for_insn (last, bb);
if (GET_CODE (last) != BARRIER)
set_block_for_insn (last, bb);
if (bb->end == after)
bb->end = last;
if (BB_END (bb) == after)
BB_END (bb) = last;
}
else
for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))

View File

@ -673,7 +673,7 @@ compute_alignments (void)
FOR_EACH_BB (bb)
{
rtx label = bb->head;
rtx label = BB_HEAD (bb);
int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
edge e;

View File

@ -376,7 +376,7 @@ first_insn_after_basic_block_note (basic_block block)
rtx insn;
/* Get the first instruction in the block. */
insn = block->head;
insn = BB_HEAD (block);
if (insn == NULL_RTX)
return NULL_RTX;
@ -505,7 +505,7 @@ verify_wide_reg_1 (rtx *px, void *pregno)
static void
verify_wide_reg (int regno, basic_block bb)
{
rtx head = bb->head, end = bb->end;
rtx head = BB_HEAD (bb), end = BB_END (bb);
while (1)
{
@ -822,7 +822,7 @@ delete_noop_moves (rtx f ATTRIBUTE_UNUSED)
FOR_EACH_BB (bb)
{
for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = next)
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
{
next = NEXT_INSN (insn);
if (INSN_P (insn) && noop_move_p (insn))
@ -1844,8 +1844,8 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
/* If this block ends in a conditional branch, for each register
live from one side of the branch and not the other, record the
register as conditionally dead. */
if (GET_CODE (bb->end) == JUMP_INSN
&& any_condjump_p (bb->end))
if (GET_CODE (BB_END (bb)) == JUMP_INSN
&& any_condjump_p (BB_END (bb)))
{
regset_head diff_head;
regset diff = INITIALIZE_REG_SET (diff_head);
@ -1870,7 +1870,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
else
{
/* This can happen with a conditional jump to the next insn. */
if (JUMP_LABEL (bb->end) != bb_true->head)
if (JUMP_LABEL (BB_END (bb)) != BB_HEAD (bb_true))
abort ();
/* Simplest way to do nothing. */
@ -1882,7 +1882,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
bb_false->global_live_at_start, BITMAP_XOR))
{
/* Extract the condition from the branch. */
rtx set_src = SET_SRC (pc_set (bb->end));
rtx set_src = SET_SRC (pc_set (BB_END (bb)));
rtx cond_true = XEXP (set_src, 0);
rtx reg = XEXP (cond_true, 0);
@ -1951,7 +1951,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
&& ! current_function_calls_eh_return)))
{
rtx insn, set;
for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
if (GET_CODE (insn) == INSN
&& (set = single_set (insn))
&& GET_CODE (SET_DEST (set)) == MEM)
@ -2031,7 +2031,7 @@ propagate_block (basic_block bb, regset live, regset local_set,
/* Scan the block an insn at a time from end to beginning. */
changed = 0;
for (insn = bb->end;; insn = prev)
for (insn = BB_END (bb); ; insn = prev)
{
/* If this is a call to `setjmp' et al, warn if any
non-volatile datum is live. */
@ -2046,7 +2046,7 @@ propagate_block (basic_block bb, regset live, regset local_set,
else
changed |= NEXT_INSN (prev) != insn;
if (insn == bb->head)
if (insn == BB_HEAD (bb))
break;
}
@ -3313,8 +3313,8 @@ attempt_auto_inc (struct propagate_block_info *pbi, rtx inc, rtx insn,
new insn(s) and do the updates. */
emit_insn_before (insns, insn);
if (pbi->bb->head == insn)
pbi->bb->head = insns;
if (BB_HEAD (pbi->bb) == insn)
BB_HEAD (pbi->bb) = insns;
/* INCR will become a NOTE and INSN won't contain a
use of INCR_REG. If a use of INCR_REG was just placed in
@ -4229,7 +4229,7 @@ count_or_remove_death_notes_bb (basic_block bb, int kill)
int count = 0;
rtx insn;
for (insn = bb->head;; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
{
@ -4273,7 +4273,7 @@ count_or_remove_death_notes_bb (basic_block bb, int kill)
}
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
@ -4300,7 +4300,7 @@ clear_log_links (sbitmap blocks)
{
basic_block bb = BASIC_BLOCK (i);
for (insn = bb->head; insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
if (INSN_P (insn))
free_INSN_LIST_list (&LOG_LINKS (insn));

View File

@ -7293,9 +7293,9 @@ sibcall_epilogue_contains (rtx insn)
static void
emit_return_into_block (basic_block bb, rtx line_note)
{
emit_jump_insn_after (gen_return (), bb->end);
emit_jump_insn_after (gen_return (), BB_END (bb));
if (line_note)
emit_note_copy_after (line_note, PREV_INSN (bb->end));
emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
}
#endif /* HAVE_return */
@ -7720,7 +7720,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
last = e->src;
/* Verify that there are no active instructions in the last block. */
label = last->end;
label = BB_END (last);
while (label && GET_CODE (label) != CODE_LABEL)
{
if (active_insn_p (label))
@ -7728,7 +7728,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
label = PREV_INSN (label);
}
if (last->head == label && GET_CODE (label) == CODE_LABEL)
if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
{
rtx epilogue_line_note = NULL_RTX;
@ -7752,7 +7752,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
if (bb == ENTRY_BLOCK_PTR)
continue;
jump = bb->end;
jump = BB_END (bb);
if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
continue;
@ -7787,9 +7787,9 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
/* Emit a return insn for the exit fallthru block. Whether
this is still reachable will be determined later. */
emit_barrier_after (last->end);
emit_barrier_after (BB_END (last));
emit_return_into_block (last, epilogue_line_note);
epilogue_end = last->end;
epilogue_end = BB_END (last);
last->succ->flags &= ~EDGE_FALLTHRU;
goto epilogue_done;
}
@ -7845,7 +7845,7 @@ epilogue_done:
for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
{
basic_block bb = e->src;
rtx insn = bb->end;
rtx insn = BB_END (bb);
rtx i;
rtx newinsn;
@ -7902,7 +7902,7 @@ epilogue_done:
}
/* Find the last line number note in the first block. */
for (insn = ENTRY_BLOCK_PTR->next_bb->end;
for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
insn != prologue_end && insn;
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)

View File

@ -2203,7 +2203,7 @@ hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
/* A copy is not available if its src or dest is subsequently
modified. Here we want to search from INSN+1 on, but
oprs_available_p searches from INSN on. */
&& (insn == BLOCK_END (BLOCK_NUM (insn))
&& (insn == BB_END (BLOCK_FOR_INSN (insn))
|| ((tmp = next_nonnote_insn (insn)) != NULL_RTX
&& oprs_available_p (pat, tmp))))
insert_set_in_table (pat, insn, table);
@ -2510,8 +2510,8 @@ compute_hash_table_work (struct hash_table *table)
??? hard-reg reg_set_in_block computation
could be moved to compute_sets since they currently don't change. */
for (insn = current_bb->head;
insn && insn != NEXT_INSN (current_bb->end);
for (insn = BB_HEAD (current_bb);
insn && insn != NEXT_INSN (BB_END (current_bb));
insn = NEXT_INSN (insn))
{
if (! INSN_P (insn))
@ -2541,12 +2541,12 @@ compute_hash_table_work (struct hash_table *table)
if (table->set_p
&& implicit_sets[current_bb->index] != NULL_RTX)
hash_scan_set (implicit_sets[current_bb->index],
current_bb->head, table);
BB_HEAD (current_bb), table);
/* The next pass builds the hash table. */
for (insn = current_bb->head, in_libcall_block = 0;
insn && insn != NEXT_INSN (current_bb->end);
for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
insn && insn != NEXT_INSN (BB_END (current_bb));
insn = NEXT_INSN (insn))
if (INSN_P (insn))
{
@ -3540,8 +3540,8 @@ classic_gcse (void)
start of the block]. */
reset_opr_set_tables ();
for (insn = bb->head;
insn != NULL && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb);
insn != NULL && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
/* Is insn of form (set (pseudo-reg) ...)? */
@ -4477,8 +4477,8 @@ cprop (int alter_jumps)
start of the block]. */
reset_opr_set_tables ();
for (insn = bb->head;
insn != NULL && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb);
insn != NULL && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
if (INSN_P (insn))
{
@ -4578,7 +4578,7 @@ find_implicit_sets (void)
/* Check for more than one successor. */
if (bb->succ && bb->succ->succ_next)
{
cond = fis_get_condition (bb->end);
cond = fis_get_condition (BB_END (bb));
if (cond
&& (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
@ -4901,8 +4901,8 @@ bypass_conditional_jumps (void)
if (bb->pred && bb->pred->pred_next)
{
setcc = NULL_RTX;
for (insn = bb->head;
insn != NULL && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb);
insn != NULL && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == INSN)
{
@ -5193,7 +5193,7 @@ process_insert_insn (struct expr *expr)
static void
insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
{
rtx insn = bb->end;
rtx insn = BB_END (bb);
rtx new_insn;
rtx reg = expr->reaching_reg;
int regno = REGNO (reg);
@ -5274,7 +5274,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
/* Since different machines initialize their parameter registers
in different orders, assume nothing. Collect the set of all
parameter registers. */
insn = find_first_parameter_load (insn, bb->head);
insn = find_first_parameter_load (insn, BB_HEAD (bb));
/* If we found all the parameter loads, then we want to insert
before the first parameter load.
@ -5834,7 +5834,7 @@ compute_transpout (void)
/* Note that flow inserted a nop at the end of basic blocks that
end in call instructions for reasons other than abnormal
control flow. */
if (GET_CODE (bb->end) != CALL_INSN)
if (GET_CODE (BB_END (bb)) != CALL_INSN)
continue;
for (i = 0; i < expr_hash_table.size; i++)
@ -5916,8 +5916,8 @@ delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
/* Scan each insn in the basic block looking for memory references and
register sets. */
stop_insn = NEXT_INSN (current_block->end);
for (insn = current_block->head;
stop_insn = NEXT_INSN (BB_END (current_block));
for (insn = BB_HEAD (current_block);
insn != stop_insn;
insn = NEXT_INSN (insn))
{
@ -5972,7 +5972,7 @@ delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
against zero. */
FOR_EACH_BB (bb)
{
rtx last_insn = bb->end;
rtx last_insn = BB_END (bb);
rtx condition, earliest;
int compare_and_branch;
@ -6022,7 +6022,7 @@ delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
delete_insn (earliest);
purge_dead_edges (bb);
/* Don't check this block again. (Note that BLOCK_END is
/* Don't check this block again. (Note that BB_END is
invalid here; we deleted the last instruction in the
block.) */
block_reg[bb->index] = 0;
@ -6088,7 +6088,7 @@ delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
block_reg = xcalloc (last_basic_block, sizeof (int));
FOR_EACH_BB (bb)
{
rtx last_insn = bb->end;
rtx last_insn = BB_END (bb);
rtx condition, earliest, reg;
/* We only want conditional branches. */
@ -6747,8 +6747,8 @@ compute_ld_motion_mems (void)
FOR_EACH_BB (bb)
{
for (insn = bb->head;
insn && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb);
insn && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
@ -7156,7 +7156,7 @@ find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
failed last time. */
if (LAST_AVAIL_CHECK_FAILURE (ptr))
{
for (tmp = bb->end;
for (tmp = BB_END (bb);
tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
tmp = PREV_INSN (tmp))
continue;
@ -7199,8 +7199,8 @@ compute_store_table (void)
/* First compute the registers set in this block. */
regvec = last_set_in;
for (insn = bb->head;
insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb);
insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
if (! INSN_P (insn))
@ -7232,8 +7232,8 @@ compute_store_table (void)
/* Now find the stores. */
memset (already_set, 0, sizeof (int) * max_gcse_regno);
regvec = already_set;
for (insn = bb->head;
insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb);
insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
if (! INSN_P (insn))
@ -7466,7 +7466,7 @@ static bool
store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
int *regs_set_after, rtx *fail_insn)
{
rtx last = bb->end, act;
rtx last = BB_END (bb), act;
if (!store_ops_ok (x_regs, regs_set_after))
{
@ -7495,7 +7495,7 @@ static bool
store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
int *regs_set_before)
{
rtx first = bb->head;
rtx first = BB_HEAD (bb);
if (!store_ops_ok (x_regs, regs_set_before))
return true;
@ -7570,7 +7570,7 @@ build_store_vectors (void)
for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
{
if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
bb, regs_set_in_block, NULL))
{
/* It should not be necessary to consider the expression
@ -7596,14 +7596,14 @@ build_store_vectors (void)
}
/* Insert an instruction at the beginning of a basic block, and update
the BLOCK_HEAD if needed. */
the BB_HEAD if needed. */
static void
insert_insn_start_bb (rtx insn, basic_block bb)
{
/* Insert at start of successor block. */
rtx prev = PREV_INSN (bb->head);
rtx before = bb->head;
rtx prev = PREV_INSN (BB_HEAD (bb));
rtx before = BB_HEAD (bb);
while (before != 0)
{
if (GET_CODE (before) != CODE_LABEL
@ -7611,7 +7611,7 @@ insert_insn_start_bb (rtx insn, basic_block bb)
|| NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
break;
prev = before;
if (prev == bb->end)
if (prev == BB_END (bb))
break;
before = NEXT_INSN (before);
}
@ -7746,9 +7746,9 @@ remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
last = XEXP (last, 0);
}
else
last = NEXT_INSN (bb->end);
last = NEXT_INSN (BB_END (bb));
for (insn = bb->head; insn != last; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
if (INSN_P (insn))
{
note = find_reg_equal_equiv_note (insn);
@ -7802,7 +7802,7 @@ replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
/* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
they are no longer accurate provided that they are reached by this
definition, so drop them. */
for (; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
if (INSN_P (insn))
{
set = single_set (insn);

View File

@ -2914,10 +2914,10 @@ main(int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED)
do_scalar_typedef ("uint8", &pos);
do_scalar_typedef ("jword", &pos);
do_scalar_typedef ("JCF_u2", &pos);
do_scalar_typedef ("void", &pos);
do_typedef ("PTR", create_pointer (resolve_typedef ("void", &pos)), &pos);
do_typedef ("PTR", create_pointer (create_scalar_type ("void",
strlen ("void"))),
&pos);
do_typedef ("HARD_REG_SET", create_array (
create_scalar_type ("unsigned long", strlen ("unsigned long")),
"2"), &pos);

View File

@ -752,7 +752,7 @@ global_conflicts (void)
}
}
insn = b->head;
insn = BB_HEAD (b);
/* Scan the code of this basic block, noting which allocnos
and hard regs are born or die. When one is born,
@ -852,7 +852,7 @@ global_conflicts (void)
}
}
if (insn == b->end)
if (insn == BB_END (b))
break;
insn = NEXT_INSN (insn);
}
@ -1801,7 +1801,7 @@ build_insn_chain (rtx first)
{
struct insn_chain *c;
if (first == b->head)
if (first == BB_HEAD (b))
{
int i;
@ -1863,7 +1863,7 @@ build_insn_chain (rtx first)
}
}
if (first == b->end)
if (first == BB_END (b))
b = b->next_bb;
/* Stop after we pass the end of the last basic block. Verify that

View File

@ -260,14 +260,14 @@ print_rtl_graph_with_bb (const char *base, const char *suffix, rtx rtx_first)
FOR_EACH_BB_REVERSE (bb)
{
rtx x;
start[INSN_UID (bb->head)] = bb->index;
end[INSN_UID (bb->end)] = bb->index;
for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
start[INSN_UID (BB_HEAD (bb))] = bb->index;
end[INSN_UID (BB_END (bb))] = bb->index;
for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
{
in_bb_p[INSN_UID (x)]
= (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
? IN_ONE_BB : IN_MULTIPLE_BB;
if (x == bb->end)
if (x == BB_END (bb))
break;
}
}
@ -321,7 +321,7 @@ print_rtl_graph_with_bb (const char *base, const char *suffix, rtx rtx_first)
{
if (e->dest != EXIT_BLOCK_PTR)
{
rtx block_head = e->dest->head;
rtx block_head = BB_HEAD (e->dest);
draw_edge (fp, INSN_UID (tmp_rtx),
INSN_UID (block_head),

View File

@ -123,8 +123,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
This pass must update information that subsequent passes expect to
be correct. Namely: reg_n_refs, reg_n_sets, reg_n_deaths,
reg_n_calls_crossed, and reg_live_length. Also, BLOCK_HEAD,
BLOCK_END.
reg_n_calls_crossed, and reg_live_length. Also, BB_HEAD, BB_END.
The information in the line number notes is carefully retained by
this pass. Notes that refer to the starting and ending of
@ -1432,8 +1431,8 @@ void
get_block_head_tail (int b, rtx *headp, rtx *tailp)
{
/* HEAD and TAIL delimit the basic block being scheduled. */
rtx head = BLOCK_HEAD (b);
rtx tail = BLOCK_END (b);
rtx head = BB_HEAD (BASIC_BLOCK (b));
rtx tail = BB_END (BASIC_BLOCK (b));
/* Don't include any notes or labels at the beginning of the
basic block, or notes at the ends of basic blocks. */
@ -2791,7 +2790,7 @@ sched_init (FILE *dump_file)
h_i_d[0].luid = 0;
luid = 1;
FOR_EACH_BB (b)
for (insn = b->head;; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (b); ; insn = NEXT_INSN (insn))
{
INSN_LUID (insn) = luid;
@ -2803,7 +2802,7 @@ sched_init (FILE *dump_file)
if (GET_CODE (insn) != NOTE)
++luid;
if (insn == b->end)
if (insn == BB_END (b))
break;
}
@ -2825,7 +2824,7 @@ sched_init (FILE *dump_file)
FOR_EACH_BB (b)
{
for (line = b->head; line; line = PREV_INSN (line))
for (line = BB_HEAD (b); line; line = PREV_INSN (line))
if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
{
line_note_head[b->index] = line;
@ -2833,7 +2832,7 @@ sched_init (FILE *dump_file)
}
/* Do a forward search as well, since we won't get to see the first
notes in a basic block. */
for (line = b->head; line; line = NEXT_INSN (line))
for (line = BB_HEAD (b); line; line = NEXT_INSN (line))
{
if (INSN_P (line))
break;
@ -2852,16 +2851,16 @@ sched_init (FILE *dump_file)
/* ??? Add a NOTE after the last insn of the last basic block. It is not
known why this is done. */
insn = EXIT_BLOCK_PTR->prev_bb->end;
insn = BB_END (EXIT_BLOCK_PTR->prev_bb);
if (NEXT_INSN (insn) == 0
|| (GET_CODE (insn) != NOTE
&& GET_CODE (insn) != CODE_LABEL
/* Don't emit a NOTE if it would end up before a BARRIER. */
&& GET_CODE (NEXT_INSN (insn)) != BARRIER))
{
emit_note_after (NOTE_INSN_DELETED, EXIT_BLOCK_PTR->prev_bb->end);
emit_note_after (NOTE_INSN_DELETED, BB_END (EXIT_BLOCK_PTR->prev_bb));
/* Make insn to appear outside BB. */
EXIT_BLOCK_PTR->prev_bb->end = PREV_INSN (EXIT_BLOCK_PTR->prev_bb->end);
BB_END (EXIT_BLOCK_PTR->prev_bb) = PREV_INSN (BB_END (EXIT_BLOCK_PTR->prev_bb));
}
/* Compute INSN_REG_WEIGHT for all blocks. We must do this before

View File

@ -148,14 +148,14 @@ static int
count_bb_insns (basic_block bb)
{
int count = 0;
rtx insn = bb->head;
rtx insn = BB_HEAD (bb);
while (1)
{
if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
count++;
if (insn == bb->end)
if (insn == BB_END (bb))
break;
insn = NEXT_INSN (insn);
}
@ -168,18 +168,18 @@ count_bb_insns (basic_block bb)
static rtx
first_active_insn (basic_block bb)
{
rtx insn = bb->head;
rtx insn = BB_HEAD (bb);
if (GET_CODE (insn) == CODE_LABEL)
{
if (insn == bb->end)
if (insn == BB_END (bb))
return NULL_RTX;
insn = NEXT_INSN (insn);
}
while (GET_CODE (insn) == NOTE)
{
if (insn == bb->end)
if (insn == BB_END (bb))
return NULL_RTX;
insn = NEXT_INSN (insn);
}
@ -195,8 +195,8 @@ first_active_insn (basic_block bb)
static rtx
last_active_insn (basic_block bb, int skip_use_p)
{
rtx insn = bb->end;
rtx head = bb->head;
rtx insn = BB_END (bb);
rtx head = BB_HEAD (bb);
while (GET_CODE (insn) == NOTE
|| GET_CODE (insn) == JUMP_INSN
@ -407,13 +407,13 @@ cond_exec_process_if_block (ce_if_block_t * ce_info,
/* Find the conditional jump to the ELSE or JOIN part, and isolate
the test. */
test_expr = cond_exec_get_condition (test_bb->end);
test_expr = cond_exec_get_condition (BB_END (test_bb));
if (! test_expr)
return FALSE;
/* If the conditional jump is more than just a conditional jump,
then we can not do conditional execution conversion on this block. */
if (! onlyjump_p (test_bb->end))
if (! onlyjump_p (BB_END (test_bb)))
return FALSE;
/* Collect the bounds of where we're to search, skipping any labels, jumps
@ -440,7 +440,7 @@ cond_exec_process_if_block (ce_if_block_t * ce_info,
true_expr = test_expr;
false_code = reversed_comparison_code (true_expr, test_bb->end);
false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
if (false_code != UNKNOWN)
false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
XEXP (true_expr, 0), XEXP (true_expr, 1));
@ -457,7 +457,7 @@ cond_exec_process_if_block (ce_if_block_t * ce_info,
goto fail;
#endif
true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
if (true_prob_val)
{
true_prob_val = XEXP (true_prob_val, 0);
@ -491,11 +491,11 @@ cond_exec_process_if_block (ce_if_block_t * ce_info,
/* If the conditional jump is more than just a conditional jump, then
we can not do conditional execution conversion on this block. */
if (! onlyjump_p (bb->end))
if (! onlyjump_p (BB_END (bb)))
goto fail;
/* Find the conditional jump and isolate the test. */
t = cond_exec_get_condition (bb->end);
t = cond_exec_get_condition (BB_END (bb));
if (! t)
goto fail;
@ -1629,7 +1629,7 @@ noce_try_abs (struct noce_if_info *if_info)
{
rtx insn, note = NULL;
for (insn = earliest;
insn != if_info->test_bb->head;
insn != BB_HEAD (if_info->test_bb);
insn = PREV_INSN (insn))
if (INSN_P (insn)
&& ((note = find_reg_note (insn, REG_EQUAL, c))
@ -1829,7 +1829,7 @@ noce_process_if_block (struct ce_if_block * ce_info)
}
/* If this is not a standard conditional jump, we can't parse it. */
jump = test_bb->end;
jump = BB_END (test_bb);
cond = noce_get_condition (jump, &if_info.cond_earliest);
if (! cond)
return FALSE;
@ -1945,8 +1945,8 @@ noce_process_if_block (struct ce_if_block * ce_info)
{
rtx note;
if (else_bb && insn_b == else_bb->end)
else_bb->end = PREV_INSN (insn_b);
if (else_bb && insn_b == BB_END (else_bb))
BB_END (else_bb) = PREV_INSN (insn_b);
reorder_insns (insn_b, insn_b, PREV_INSN (jump));
/* If there was a REG_EQUAL note, delete it since it may have been
@ -2019,7 +2019,7 @@ noce_process_if_block (struct ce_if_block * ce_info)
unshare_all_rtl_in_chain (insn_b);
end_sequence ();
emit_insn_after_setloc (insn_b, test_bb->end, INSN_LOCATOR (insn_a));
emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
}
/* Merge the blocks! */
@ -2125,7 +2125,7 @@ merge_if_block (struct ce_if_block * ce_info)
if (! join_bb)
{
rtx last = combo_bb->end;
rtx last = BB_END (combo_bb);
/* The outgoing edge for the current COMBO block should already
be correct. Verify this. */
@ -2316,8 +2316,8 @@ block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
together for conditional execution support. ??? we should support
conditional execution support across calls for IA-64 some day, but
for now it makes the code simpler. */
end = cur_bb->end;
insn = cur_bb->head;
end = BB_END (cur_bb);
insn = BB_HEAD (cur_bb);
while (insn != NULL_RTX)
{
@ -2448,7 +2448,7 @@ find_if_block (struct ce_if_block * ce_info)
if (then_succ != NULL_EDGE
&& (then_succ->succ_next != NULL_EDGE
|| (then_succ->flags & EDGE_COMPLEX)
|| (flow2_completed && tablejump_p (then_bb->end, NULL, NULL))))
|| (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
return FALSE;
/* If the THEN block has no successors, conditional execution can still
@ -2461,11 +2461,11 @@ find_if_block (struct ce_if_block * ce_info)
{
if (else_bb->pred->pred_next == NULL_EDGE)
{
rtx last_insn = then_bb->end;
rtx last_insn = BB_END (then_bb);
while (last_insn
&& GET_CODE (last_insn) == NOTE
&& last_insn != then_bb->head)
&& last_insn != BB_HEAD (then_bb))
last_insn = PREV_INSN (last_insn);
if (last_insn
@ -2496,7 +2496,7 @@ find_if_block (struct ce_if_block * ce_info)
&& else_bb->pred->pred_next == NULL_EDGE
&& else_succ->succ_next == NULL_EDGE
&& ! (else_succ->flags & EDGE_COMPLEX)
&& ! (flow2_completed && tablejump_p (else_bb->end, NULL, NULL)))
&& ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
join_bb = else_succ->dest;
/* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
@ -2510,15 +2510,15 @@ find_if_block (struct ce_if_block * ce_info)
fprintf (rtl_dump_file, "\nIF-THEN%s block found, pass %d, start block %d [insn %d], then %d [%d]",
(else_bb) ? "-ELSE" : "",
ce_info->pass,
test_bb->index, (test_bb->head) ? (int)INSN_UID (test_bb->head) : -1,
then_bb->index, (then_bb->head) ? (int)INSN_UID (then_bb->head) : -1);
test_bb->index, (BB_HEAD (test_bb)) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
then_bb->index, (BB_HEAD (then_bb)) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
if (else_bb)
fprintf (rtl_dump_file, ", else %d [%d]",
else_bb->index, (else_bb->head) ? (int)INSN_UID (else_bb->head) : -1);
else_bb->index, (BB_HEAD (else_bb)) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
fprintf (rtl_dump_file, ", join %d [%d]",
join_bb->index, (join_bb->head) ? (int)INSN_UID (join_bb->head) : -1);
join_bb->index, (BB_HEAD (join_bb)) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
if (ce_info->num_multiple_test_blocks > 0)
fprintf (rtl_dump_file, ", %d %s block%s last test %d [%d]",
@ -2526,8 +2526,8 @@ find_if_block (struct ce_if_block * ce_info)
(ce_info->and_and_p) ? "&&" : "||",
(ce_info->num_multiple_test_blocks == 1) ? "" : "s",
ce_info->last_test_bb->index,
((ce_info->last_test_bb->head)
? (int)INSN_UID (ce_info->last_test_bb->head)
((BB_HEAD (ce_info->last_test_bb))
? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
: -1));
fputc ('\n', rtl_dump_file);
@ -2588,7 +2588,7 @@ find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
}
/* If this is not a standard conditional jump, we can't parse it. */
jump = test_bb->end;
jump = BB_END (test_bb);
cond = noce_get_condition (jump, &cond_earliest);
if (! cond)
return FALSE;
@ -2679,7 +2679,7 @@ block_has_only_trap (basic_block bb)
/* The only instruction in the THEN block must be the trap. */
trap = first_active_insn (bb);
if (! (trap == bb->end
if (! (trap == BB_END (bb)
&& GET_CODE (PATTERN (trap)) == TRAP_IF
&& TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
return NULL_RTX;
@ -2861,7 +2861,7 @@ find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
return FALSE;
/* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
;
else if (else_succ->dest->index < 0
@ -2928,11 +2928,11 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
{
rtx head, end, jump, earliest, old_dest, new_label = NULL_RTX;
jump = test_bb->end;
jump = BB_END (test_bb);
/* Find the extent of the real code in the merge block. */
head = merge_bb->head;
end = merge_bb->end;
head = BB_HEAD (merge_bb);
end = BB_END (merge_bb);
if (GET_CODE (head) == CODE_LABEL)
head = NEXT_INSN (head);
@ -3152,8 +3152,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
/* Move the insns out of MERGE_BB to before the branch. */
if (head != NULL)
{
if (end == merge_bb->end)
merge_bb->end = PREV_INSN (head);
if (end == BB_END (merge_bb))
BB_END (merge_bb) = PREV_INSN (head);
if (squeeze_notes (&head, &end))
return TRUE;

View File

@ -1058,8 +1058,8 @@ optimize_mode_switching (FILE *file)
REG_SET_TO_HARD_REG_SET (live_now,
bb->global_live_at_start);
for (insn = bb->head;
insn != NULL && insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb);
insn != NULL && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
@ -1093,7 +1093,7 @@ optimize_mode_switching (FILE *file)
/* Check for blocks without ANY mode requirements. */
if (last_mode == no_mode)
{
ptr = new_seginfo (no_mode, bb->end, bb->index, live_now);
ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
add_seginfo (info + bb->index, ptr);
}
}
@ -1201,8 +1201,8 @@ optimize_mode_switching (FILE *file)
if (eg->flags & EDGE_ABNORMAL)
{
emited = true;
if (GET_CODE (src_bb->end) == JUMP_INSN)
emit_insn_before (mode_set, src_bb->end);
if (GET_CODE (BB_END (src_bb)) == JUMP_INSN)
emit_insn_before (mode_set, BB_END (src_bb));
/* It doesn't make sense to switch to normal mode
after a CALL_INSN, so we're going to abort if we
find one. The cases in which a CALL_INSN may
@ -1214,8 +1214,8 @@ optimize_mode_switching (FILE *file)
the call (it wouldn't make sense, anyway). In
the case of EH edges, EH entry points also start
in normal mode, so a similar reasoning applies. */
else if (GET_CODE (src_bb->end) == INSN)
emit_insn_after (mode_set, src_bb->end);
else if (GET_CODE (BB_END (src_bb)) == INSN)
emit_insn_after (mode_set, BB_END (src_bb));
else
abort ();
bb_info[j][src_bb->index].computing = mode;

View File

@ -809,7 +809,9 @@ update_equiv_regs (void)
{
loop_depth = bb->loop_depth;
for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb);
insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
rtx note;
rtx set;
@ -1023,7 +1025,9 @@ update_equiv_regs (void)
FOR_EACH_BB_REVERSE (bb)
{
loop_depth = bb->loop_depth;
for (insn = bb->end; insn != PREV_INSN (bb->head); insn = PREV_INSN (insn))
for (insn = BB_END (bb);
insn != PREV_INSN (BB_HEAD (bb));
insn = PREV_INSN (insn))
{
rtx link;
@ -1117,8 +1121,8 @@ update_equiv_regs (void)
REG_N_CALLS_CROSSED (regno) = 0;
REG_LIVE_LENGTH (regno) = 2;
if (insn == bb->head)
bb->head = PREV_INSN (insn);
if (insn == BB_HEAD (bb))
BB_HEAD (bb) = PREV_INSN (insn);
/* Remember to clear REGNO from all basic block's live
info. */
@ -1204,13 +1208,13 @@ block_alloc (int b)
/* Count the instructions in the basic block. */
insn = BLOCK_END (b);
insn = BB_END (BASIC_BLOCK (b));
while (1)
{
if (GET_CODE (insn) != NOTE)
if (++insn_count > max_uid)
abort ();
if (insn == BLOCK_HEAD (b))
if (insn == BB_HEAD (BASIC_BLOCK (b)))
break;
insn = PREV_INSN (insn);
}
@ -1227,7 +1231,7 @@ block_alloc (int b)
and assigns quantities to registers.
It computes which registers to tie. */
insn = BLOCK_HEAD (b);
insn = BB_HEAD (BASIC_BLOCK (b));
while (1)
{
if (GET_CODE (insn) != NOTE)
@ -1459,7 +1463,7 @@ block_alloc (int b)
IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
if (insn == BLOCK_END (b))
if (insn == BB_END (BASIC_BLOCK (b)))
break;
insn = NEXT_INSN (insn);

View File

@ -126,7 +126,7 @@ may_unswitch_on_p (struct loops *loops, basic_block bb, struct loop *loop,
/* BB must end in a simple conditional jump. */
if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
return false;
if (!any_condjump_p (bb->end))
if (!any_condjump_p (BB_END (bb)))
return false;
/* With branches inside loop. */
@ -141,12 +141,12 @@ may_unswitch_on_p (struct loops *loops, basic_block bb, struct loop *loop,
/* Condition must be invariant. We use just a stupid test of invariantness
of the condition: all used regs must not be modified inside loop body. */
test = get_condition (bb->end, NULL, true);
test = get_condition (BB_END (bb), NULL, true);
if (!test)
return false;
for (i = 0; i < loop->num_nodes; i++)
if (modified_between_p (test, body[i]->head, NEXT_INSN (body[i]->end)))
if (modified_between_p (test, BB_HEAD (body[i]), NEXT_INSN (BB_END (body[i]))))
return false;
return true;
@ -248,7 +248,7 @@ unswitch_single_loop (struct loops *loops, struct loop *loop,
return;
}
if (!(cond = get_condition (bbs[i]->end, &split_before, true)))
if (!(cond = get_condition (BB_END (bbs[i]), &split_before, true)))
abort ();
rcond = reversed_condition (cond);
@ -345,7 +345,7 @@ unswitch_loop (struct loops *loops, struct loop *loop, basic_block unswitch_on)
abort ();
/* Will we be able to perform redirection? */
if (!any_condjump_p (unswitch_on->end))
if (!any_condjump_p (BB_END (unswitch_on)))
return NULL;
if (!cfg_layout_can_duplicate_bb_p (unswitch_on))
return NULL;

View File

@ -10697,18 +10697,18 @@ loop_dump_aux (const struct loop *loop, FILE *file,
/* Print diagnostics to compare our concept of a loop with
what the loop notes say. */
if (! PREV_INSN (loop->first->head)
|| GET_CODE (PREV_INSN (loop->first->head)) != NOTE
|| NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
if (! PREV_INSN (BB_HEAD (loop->first))
|| GET_CODE (PREV_INSN (BB_HEAD (loop->first))) != NOTE
|| NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (loop->first)))
!= NOTE_INSN_LOOP_BEG)
fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
INSN_UID (PREV_INSN (loop->first->head)));
if (! NEXT_INSN (loop->last->end)
|| GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
|| NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
INSN_UID (PREV_INSN (BB_HEAD (loop->first))));
if (! NEXT_INSN (BB_END (loop->last))
|| GET_CODE (NEXT_INSN (BB_END (loop->last))) != NOTE
|| NOTE_LINE_NUMBER (NEXT_INSN (BB_END (loop->last)))
!= NOTE_INSN_LOOP_END)
fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
INSN_UID (NEXT_INSN (loop->last->end)));
INSN_UID (NEXT_INSN (BB_END (loop->last))));
if (loop->start)
{

View File

@ -664,7 +664,7 @@ reload_combine (void)
FOR_EACH_BB_REVERSE (bb)
{
insn = bb->head;
insn = BB_HEAD (bb);
if (GET_CODE (insn) == CODE_LABEL)
{
HARD_REG_SET live;

View File

@ -154,9 +154,9 @@ static bool
predicted_by_p (basic_block bb, enum br_predictor predictor)
{
rtx note;
if (!INSN_P (bb->end))
if (!INSN_P (BB_END (bb)))
return false;
for (note = REG_NOTES (bb->end); note; note = XEXP (note, 1))
for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_BR_PRED
&& INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
return true;
@ -199,7 +199,7 @@ void
predict_edge (edge e, enum br_predictor predictor, int probability)
{
rtx last_insn;
last_insn = e->src->end;
last_insn = BB_END (e->src);
/* We can store the branch prediction information only about
conditional jumps. */
@ -445,7 +445,7 @@ estimate_probability (struct loops *loops_info)
statements construct loops via "non-loop" constructs
in the source language and are better to be handled
separately. */
if (!can_predict_insn_p (bb->end)
if (!can_predict_insn_p (BB_END (bb))
|| predicted_by_p (bb, PRED_CONTINUE))
continue;
@ -476,7 +476,7 @@ estimate_probability (struct loops *loops_info)
/* Attempt to predict conditional jumps using a number of heuristics. */
FOR_EACH_BB (bb)
{
rtx last_insn = bb->end;
rtx last_insn = BB_END (bb);
rtx cond, earliest;
edge e;
@ -509,7 +509,7 @@ estimate_probability (struct loops *loops_info)
is improbable. This is because such calls are often used
to signal exceptional situations such as printing error
messages. */
for (insn = e->dest->head; insn != NEXT_INSN (e->dest->end);
for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CALL_INSN
/* Constant and pure calls are hardly used to signalize
@ -613,10 +613,10 @@ estimate_probability (struct loops *loops_info)
/* Attach the combined probability to each conditional jump. */
FOR_EACH_BB (bb)
if (GET_CODE (bb->end) == JUMP_INSN
&& any_condjump_p (bb->end)
if (GET_CODE (BB_END (bb)) == JUMP_INSN
&& any_condjump_p (BB_END (bb))
&& bb->succ->succ_next != NULL)
combine_predictions_for_insn (bb->end, bb);
combine_predictions_for_insn (BB_END (bb), bb);
free_dominance_info (post_dominators);
free_dominance_info (dominators);
@ -765,7 +765,7 @@ process_note_prediction (basic_block bb, int *heads,
/* Now find the edge that leads to our branch and apply the prediction. */
if (y == last_basic_block || !can_predict_insn_p (BASIC_BLOCK (y)->end))
if (y == last_basic_block || !can_predict_insn_p (BB_END (BASIC_BLOCK (y))))
return;
for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
if (e->dest->index >= 0
@ -790,8 +790,8 @@ process_note_predictions (basic_block bb, int *heads,
int was_bb_head = 0;
int noreturn_block = 1;
for (insn = bb->end; insn;
was_bb_head |= (insn == bb->head), insn = PREV_INSN (insn))
for (insn = BB_END (bb); insn;
was_bb_head |= (insn == BB_HEAD (bb)), insn = PREV_INSN (insn))
{
if (GET_CODE (insn) != NOTE)
{
@ -1105,7 +1105,7 @@ expensive_function_p (int threshold)
{
rtx insn;
for (insn = bb->head; insn != NEXT_INSN (bb->end);
for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
if (active_insn_p (insn))
{
@ -1149,7 +1149,7 @@ estimate_bb_frequencies (struct loops *loops)
notes. */
FOR_EACH_BB (bb)
{
rtx last_insn = bb->end;
rtx last_insn = BB_END (bb);
if (!can_predict_insn_p (last_insn))
{

View File

@ -512,10 +512,10 @@ compute_branch_probabilities (void)
|| (e->count > bb->count
&& e->dest != EXIT_BLOCK_PTR))
{
rtx insn = bb->end;
rtx insn = BB_END (bb);
while (GET_CODE (insn) != CALL_INSN
&& insn != bb->head
&& insn != BB_HEAD (bb)
&& keep_with_call_p (insn))
insn = PREV_INSN (insn);
if (GET_CODE (insn) == CALL_INSN)
@ -534,7 +534,7 @@ compute_branch_probabilities (void)
for (e = bb->succ; e; e = e->succ_next)
e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
if (bb->index >= 0
&& any_condjump_p (bb->end)
&& any_condjump_p (BB_END (bb))
&& bb->succ->succ_next)
{
int prob;
@ -554,15 +554,15 @@ compute_branch_probabilities (void)
index = 19;
hist_br_prob[index]++;
note = find_reg_note (bb->end, REG_BR_PROB, 0);
note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
/* There may be already note put by some other pass, such
as builtin_expect expander. */
if (note)
XEXP (note, 0) = GEN_INT (prob);
else
REG_NOTES (bb->end)
REG_NOTES (BB_END (bb))
= gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
REG_NOTES (bb->end));
REG_NOTES (BB_END (bb)));
num_branches++;
}
}
@ -594,7 +594,7 @@ compute_branch_probabilities (void)
e->probability = REG_BR_PROB_BASE / total;
}
if (bb->index >= 0
&& any_condjump_p (bb->end)
&& any_condjump_p (BB_END (bb))
&& bb->succ->succ_next)
num_branches++, num_never_executed++;
}
@ -892,7 +892,7 @@ branch_prob (void)
FOR_EACH_BB (bb)
{
rtx insn = bb->head;
rtx insn = BB_HEAD (bb);
int ignore_next_note = 0;
offset = 0;
@ -905,7 +905,7 @@ branch_prob (void)
else
insn = NEXT_INSN (insn);
while (insn != bb->end)
while (insn != BB_END (bb))
{
if (GET_CODE (insn) == NOTE)
{

View File

@ -884,7 +884,7 @@ live_in_edge (struct df *df, struct curr_use *use, edge e)
use->live_over_abnormal = 1;
bitmap_set_bit (live_at_end[e->src->index], DF_REF_ID (use->wp->ref));
info_pred = (struct ra_bb_info *) e->src->aux;
next_insn = e->src->end;
next_insn = BB_END (e->src);
/* If the last insn of the pred. block doesn't completely define the
current use, we need to check the block. */
@ -899,7 +899,7 @@ live_in_edge (struct df *df, struct curr_use *use, edge e)
creation to later. */
bitmap_set_bit (info_pred->live_throughout,
DF_REF_ID (use->wp->ref));
next_insn = e->src->head;
next_insn = BB_HEAD (e->src);
}
return next_insn;
}
@ -1033,7 +1033,7 @@ livethrough_conflicts_bb (basic_block bb)
/* First collect the IDs of all defs, count the number of death
containing insns, and if there's some call_insn here. */
all_defs = BITMAP_XMALLOC ();
for (insn = bb->head; insn; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); insn; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
{
@ -1048,7 +1048,7 @@ livethrough_conflicts_bb (basic_block bb)
if (GET_CODE (insn) == CALL_INSN)
contains_call = 1;
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
@ -2674,10 +2674,10 @@ detect_webs_set_in_cond_jump (void)
{
basic_block bb;
FOR_EACH_BB (bb)
if (GET_CODE (bb->end) == JUMP_INSN)
if (GET_CODE (BB_END (bb)) == JUMP_INSN)
{
struct df_link *link;
for (link = DF_INSN_DEFS (df, bb->end); link; link = link->next)
for (link = DF_INSN_DEFS (df, BB_END (bb)); link; link = link->next)
if (link->ref && DF_REF_REGNO (link->ref) >= FIRST_PSEUDO_REGISTER)
{
struct web *web = def2web[DF_REF_ID (link->ref)];

View File

@ -528,11 +528,12 @@ ra_debug_bbi (int bbi)
{
basic_block bb = BASIC_BLOCK (bbi);
rtx insn;
for (insn = bb->head; insn; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); insn; insn = NEXT_INSN (insn))
{
ra_print_rtx_top (stderr, insn, (insn == bb->head || insn == bb->end));
ra_print_rtx_top (stderr, insn,
(insn == BB_HEAD (bb) || insn == BB_END (bb)));
fprintf (stderr, "\n");
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
}
@ -911,7 +912,7 @@ dump_static_insn_cost (FILE *file, const char *message, const char *prefix)
{
unsigned HOST_WIDE_INT block_cost = bb->frequency;
rtx insn, set;
for (insn = bb->head; insn; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); insn; insn = NEXT_INSN (insn))
{
/* Yes, yes. We don't calculate the costs precisely.
Only for "simple enough" insns. Those containing single
@ -950,7 +951,7 @@ dump_static_insn_cost (FILE *file, const char *message, const char *prefix)
pcost->count++;
}
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
}
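The ra-debug.c hunks above use the loop shape this patch rewrites throughout: visit every insn of a block, from its first to its last. A minimal sketch of the idiom with the new accessors (the helper name is hypothetical; BB_HEAD, BB_END, NEXT_INSN and INSN_P are the GCC internals visible in the hunks, and the sketch assumes a well-formed insn chain):

/* Sketch only: walk all insns of BB using the new accessor macros.  */
static void
walk_block_insns (basic_block bb)
{
  rtx insn;

  for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          /* ... examine or transform INSN here ... */
        }
      if (insn == BB_END (bb))   /* last insn of the block reached */
        break;
    }
}

Testing against BB_END (bb) after handling the insn, rather than in the loop condition, keeps the block's final insn inside the walk.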

View File

@ -444,8 +444,8 @@ rewrite_program (bitmap new_deaths)
end_sequence ();
emit_insn_before (insns, insn);
if (bb->head == insn)
bb->head = NEXT_INSN (prev);
if (BB_HEAD (bb) == insn)
BB_HEAD (bb) = NEXT_INSN (prev);
for (insn = PREV_INSN (insn); insn != prev;
insn = PREV_INSN (insn))
{
@ -492,8 +492,8 @@ rewrite_program (bitmap new_deaths)
if (insns)
{
emit_insn_after (insns, insn);
if (bb->end == insn)
bb->end = PREV_INSN (following);
if (BB_END (bb) == insn)
BB_END (bb) = PREV_INSN (following);
for (insn = insns; insn != following; insn = NEXT_INSN (insn))
{
set_block_for_insn (insn, bb);
@ -685,8 +685,8 @@ insert_stores (bitmap new_deaths)
if (insns)
{
emit_insn_after (insns, insn);
if (bb->end == insn)
bb->end = PREV_INSN (following);
if (BB_END (bb) == insn)
BB_END (bb) = PREV_INSN (following);
for (ni = insns; ni != following; ni = NEXT_INSN (ni))
{
set_block_for_insn (ni, bb);
@ -941,8 +941,8 @@ emit_loads (struct rewrite_info *ri, int nl_first_reload, rtx last_block_insn)
rtx foll = NEXT_INSN (after);
bb = BLOCK_FOR_INSN (after);
emit_insn_after (ni, after);
if (bb->end == after)
bb->end = PREV_INSN (foll);
if (BB_END (bb) == after)
BB_END (bb) = PREV_INSN (foll);
for (ni = NEXT_INSN (after); ni != foll; ni = NEXT_INSN (ni))
{
set_block_for_insn (ni, bb);
@ -954,8 +954,8 @@ emit_loads (struct rewrite_info *ri, int nl_first_reload, rtx last_block_insn)
rtx prev = PREV_INSN (before);
bb = BLOCK_FOR_INSN (before);
emit_insn_before (ni, before);
if (bb->head == before)
bb->head = NEXT_INSN (prev);
if (BB_HEAD (bb) == before)
BB_HEAD (bb) = NEXT_INSN (prev);
for (; ni != before; ni = NEXT_INSN (ni))
{
set_block_for_insn (ni, bb);
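Every ra-rewrite.c hunk above follows one pattern: new insns are emitted next to a block's first or last insn, and the block boundary is then moved by assigning directly to BB_HEAD (bb) or BB_END (bb). A hedged sketch of the end-of-block case (the helper name is hypothetical; emit_insn_after, NEXT_INSN and PREV_INSN are the internals used above):

/* Sketch only: emit INSNS after AFTER inside BB and keep the block's
   recorded end correct, mirroring the insert_stores/emit_loads hunks.
   The real hunks also call set_block_for_insn on each emitted insn.  */
static void
emit_after_and_fix_end (basic_block bb, rtx after, rtx insns)
{
  rtx following = NEXT_INSN (after);      /* first insn past the insertion point */

  emit_insn_after (insns, after);
  if (BB_END (bb) == after)               /* AFTER was the block's last insn, */
    BB_END (bb) = PREV_INSN (following);  /* so the last emitted insn now is  */
}

The mirror image with emit_insn_before, PREV_INSN and BB_HEAD handles insertions at the start of a block, as in rewrite_program above.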

View File

@ -665,7 +665,7 @@ reg_alloc (void)
for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
{
basic_block bb = e->src;
last = bb->end;
last = BB_END (bb);
if (!INSN_P (last) || GET_CODE (PATTERN (last)) != USE)
{
rtx insns;

View File

@ -2696,12 +2696,12 @@ split_all_insns (int upd_life)
rtx insn, next;
bool finish = false;
for (insn = bb->head; !finish ; insn = next)
for (insn = BB_HEAD (bb); !finish ; insn = next)
{
/* Can't use `next_real_insn' because that might go across
CODE_LABELS and short-out basic blocks. */
next = NEXT_INSN (insn);
finish = (insn == bb->end);
finish = (insn == BB_END (bb));
if (INSN_P (insn))
{
rtx set = single_set (insn);
@ -3042,7 +3042,7 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif
for (insn = bb->end; ; insn = prev)
for (insn = BB_END (bb); ; insn = prev)
{
prev = PREV_INSN (insn);
if (INSN_P (insn))
@ -3158,7 +3158,7 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
XEXP (note, 0),
REG_NOTES (x));
if (x != bb->end && eh_edge)
if (x != BB_END (bb) && eh_edge)
{
edge nfte, nehe;
int flags;
@ -3242,7 +3242,7 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
}
}
if (insn == bb->head)
if (insn == BB_HEAD (bb))
break;
}
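The split_all_insns hunk above shows the variant of the block walk used when the current insn may be split or deleted: the successor and the end-of-block test are captured before the insn is touched. A minimal sketch (hypothetical helper name, same GCC-internal assumptions as above):

/* Sketch only: walk BB while INSN may be split or deleted.  NEXT and
   FINISH are computed first, so they stay valid after the change.  */
static void
walk_block_with_changes (basic_block bb)
{
  rtx insn, next;
  bool finish = false;

  for (insn = BB_HEAD (bb); !finish; insn = next)
    {
      next = NEXT_INSN (insn);
      finish = (insn == BB_END (bb));
      /* ... INSN may now be replaced, split or deleted ... */
    }
}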

View File

@ -336,7 +336,7 @@ next_flags_user (rtx insn)
/* Search forward looking for the first use of this value.
Stop at block boundaries. */
while (insn != current_block->end)
while (insn != BB_END (current_block))
{
insn = NEXT_INSN (insn);
@ -973,10 +973,10 @@ emit_swap_insn (rtx insn, stack regstack, rtx reg)
/* Find the previous insn involving stack regs, but don't pass a
block boundary. */
i1 = NULL;
if (current_block && insn != current_block->head)
if (current_block && insn != BB_HEAD (current_block))
{
rtx tmp = PREV_INSN (insn);
rtx limit = PREV_INSN (current_block->head);
rtx limit = PREV_INSN (BB_HEAD (current_block));
while (tmp != limit)
{
if (GET_CODE (tmp) == CODE_LABEL
@ -1022,7 +1022,7 @@ emit_swap_insn (rtx insn, stack regstack, rtx reg)
if (i1)
emit_insn_after (swap_rtx, i1);
else if (current_block)
emit_insn_before (swap_rtx, current_block->head);
emit_insn_before (swap_rtx, BB_HEAD (current_block));
else
emit_insn_before (swap_rtx, insn);
}
@ -1232,7 +1232,7 @@ swap_rtx_condition (rtx insn)
/* Search forward looking for the first use of this value.
Stop at block boundaries. */
while (insn != current_block->end)
while (insn != BB_END (current_block))
{
insn = NEXT_INSN (insn);
if (INSN_P (insn) && reg_mentioned_p (dest, insn))
@ -2292,7 +2292,7 @@ change_stack (rtx insn, stack old, stack new, enum emit_where where)
if (where == EMIT_AFTER)
{
if (current_block && current_block->end == insn)
if (current_block && BB_END (current_block) == insn)
update_end = 1;
insn = NEXT_INSN (insn);
}
@ -2375,7 +2375,7 @@ change_stack (rtx insn, stack old, stack new, enum emit_where where)
}
if (update_end)
current_block->end = PREV_INSN (insn);
BB_END (current_block) = PREV_INSN (insn);
}
/* Print stack configuration. */
@ -2536,7 +2536,7 @@ compensate_edge (edge e, FILE *file)
/* change_stack kills values in regstack. */
tmpstack = regstack;
change_stack (block->end, &tmpstack, target_stack, EMIT_AFTER);
change_stack (BB_END (block), &tmpstack, target_stack, EMIT_AFTER);
return false;
}
@ -2607,8 +2607,8 @@ compensate_edge (edge e, FILE *file)
/* change_stack kills values in regstack. */
tmpstack = regstack;
change_stack (block->end, &tmpstack, target_stack,
(GET_CODE (block->end) == JUMP_INSN
change_stack (BB_END (block), &tmpstack, target_stack,
(GET_CODE (BB_END (block)) == JUMP_INSN
? EMIT_BEFORE : EMIT_AFTER));
}
else
@ -2714,7 +2714,7 @@ convert_regs_1 (FILE *file, basic_block block)
/* Process all insns in this block. Keep track of NEXT so that we
don't process insns emitted while substituting in INSN. */
next = block->head;
next = BB_HEAD (block);
regstack = bi->stack_in;
do
{
@ -2724,7 +2724,7 @@ convert_regs_1 (FILE *file, basic_block block)
/* Ensure we have not missed a block boundary. */
if (next == NULL)
abort ();
if (insn == block->end)
if (insn == BB_END (block))
next = NULL;
/* Don't bother processing unless there is a stack reg
@ -2753,7 +2753,7 @@ convert_regs_1 (FILE *file, basic_block block)
print_stack (file, &regstack);
}
insn = block->end;
insn = BB_END (block);
if (GET_CODE (insn) == JUMP_INSN)
insn = PREV_INSN (insn);

View File

@ -1085,8 +1085,8 @@ scan_one_insn (rtx insn, int pass)
{
basic_block b;
FOR_EACH_BB (b)
if (insn == b->head)
b->head = newinsn;
if (insn == BB_HEAD (b))
BB_HEAD (b) = newinsn;
}
/* This makes one more setting of new insns's dest. */
@ -1240,10 +1240,10 @@ regclass (rtx f, int nregs, FILE *dump)
aggressive than the assumptions made elsewhere and is being
tried as an experiment. */
frequency = REG_FREQ_FROM_BB (bb);
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
insn = scan_one_insn (insn, pass);
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
}

View File

@ -256,8 +256,8 @@ mark_flags_life_zones (rtx flags)
rtx insn, end;
int live;
insn = block->head;
end = block->end;
insn = BB_HEAD (block);
end = BB_END (block);
/* Look out for the (unlikely) case of flags being live across
basic block boundaries. */
@ -810,7 +810,7 @@ copy_src_to_dest (rtx insn, rtx src, rtx dest, int old_max_uid)
bb = regmove_bb_head[insn_uid];
if (bb >= 0)
{
BLOCK_HEAD (bb) = move_insn;
BB_HEAD (BASIC_BLOCK (bb)) = move_insn;
regmove_bb_head[insn_uid] = -1;
}
}
@ -1061,7 +1061,7 @@ regmove_optimize (rtx f, int nregs, FILE *regmove_dump_file)
regmove_bb_head = xmalloc (sizeof (int) * (old_max_uid + 1));
for (i = old_max_uid; i >= 0; i--) regmove_bb_head[i] = -1;
FOR_EACH_BB (bb)
regmove_bb_head[INSN_UID (bb->head)] = bb->index;
regmove_bb_head[INSN_UID (BB_HEAD (bb))] = bb->index;
/* A forward/backward pass. Replace output operands with input operands. */
@ -1491,13 +1491,13 @@ regmove_optimize (rtx f, int nregs, FILE *regmove_dump_file)
ends. Fix that here. */
FOR_EACH_BB (bb)
{
rtx end = bb->end;
rtx end = BB_END (bb);
rtx new = end;
rtx next = NEXT_INSN (new);
while (next != 0 && INSN_UID (next) >= old_max_uid
&& (bb->next_bb == EXIT_BLOCK_PTR || bb->next_bb->head != next))
&& (bb->next_bb == EXIT_BLOCK_PTR || BB_HEAD (bb->next_bb) != next))
new = next, next = NEXT_INSN (new);
bb->end = new;
BB_END (bb) = new;
}
done:
@ -2304,9 +2304,9 @@ combine_stack_adjustments_for_block (basic_block bb)
struct record_stack_memrefs_data data;
bool end_of_block = false;
for (insn = bb->head; !end_of_block ; insn = next)
for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
{
end_of_block = insn == bb->end;
end_of_block = insn == BB_END (bb);
next = NEXT_INSN (insn);
if (! INSN_P (insn))

View File

@ -149,7 +149,7 @@ merge_overlapping_regs (basic_block b, HARD_REG_SET *pset,
HARD_REG_SET live;
REG_SET_TO_HARD_REG_SET (live, b->global_live_at_start);
insn = b->head;
insn = BB_HEAD (b);
while (t)
{
/* Search forward until the next reference to the register to be
@ -729,7 +729,7 @@ build_def_use (basic_block bb)
open_chains = closed_chains = NULL;
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
{
@ -954,7 +954,7 @@ build_def_use (basic_block bb)
scan_rtx (insn, &XEXP (note, 0), NO_REGS, terminate_dead,
OP_IN, 0);
}
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}
@ -1525,7 +1525,7 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
bool changed = false;
rtx insn;
for (insn = bb->head; ; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
int n_ops, i, alt, predicated;
bool is_asm;
@ -1533,7 +1533,7 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
if (! INSN_P (insn))
{
if (insn == bb->end)
if (insn == BB_END (bb))
break;
else
continue;
@ -1709,7 +1709,7 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
copy_value (SET_DEST (set), SET_SRC (set), vd);
if (insn == bb->end)
if (insn == BB_END (bb))
break;
}

View File

@ -7963,9 +7963,10 @@ fixup_abnormal_edges (void)
== (EDGE_ABNORMAL | EDGE_EH))
break;
}
if (e && GET_CODE (bb->end) != CALL_INSN && !can_throw_internal (bb->end))
if (e && GET_CODE (BB_END (bb)) != CALL_INSN
&& !can_throw_internal (BB_END (bb)))
{
rtx insn = bb->end, stop = NEXT_INSN (bb->end);
rtx insn = BB_END (bb), stop = NEXT_INSN (BB_END (bb));
rtx next;
for (e = bb->succ; e; e = e->succ_next)
if (e->flags & EDGE_FALLTHRU)
@ -7974,11 +7975,11 @@ fixup_abnormal_edges (void)
be already deleted. */
while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
&& !can_throw_internal (insn)
&& insn != bb->head)
&& insn != BB_HEAD (bb))
insn = PREV_INSN (insn);
if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
abort ();
bb->end = insn;
BB_END (bb) = insn;
inserted = true;
insn = NEXT_INSN (insn);
while (insn && insn != stop)

View File

@ -155,7 +155,7 @@ find_basic_block (rtx insn, int search_limit)
insn = next_nonnote_insn (insn))
{
FOR_EACH_BB (bb)
if (insn == bb->head)
if (insn == BB_HEAD (bb))
return bb->index;
}
@ -913,7 +913,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
information, we can get it from there unless the insn at the
start of the basic block has been deleted. */
if (tinfo && tinfo->block != -1
&& ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block)))
&& ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
b = tinfo->block;
}
@ -979,7 +979,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
/* Get starting and ending insn, handling the case where each might
be a SEQUENCE. */
start_insn = (b == 0 ? insns : BLOCK_HEAD (b));
start_insn = (b == 0 ? insns : BB_HEAD (BASIC_BLOCK (b)));
stop_insn = target;
if (GET_CODE (start_insn) == INSN

View File

@ -218,8 +218,8 @@ add_missing_bbs (rtx before, basic_block first, basic_block last)
{
before = emit_note_before (NOTE_INSN_BASIC_BLOCK, before);
NOTE_BASIC_BLOCK (before) = last;
last->head = before;
last->end = before;
BB_HEAD (last) = before;
BB_END (last) = before;
update_bb_for_insn (last);
}
}
@ -233,10 +233,10 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
rtx tail)
{
rtx insn = head;
rtx last_inside = bb->head;
rtx last_inside = BB_HEAD (bb);
rtx aftertail = NEXT_INSN (tail);
head = bb->head;
head = BB_HEAD (bb);
for (; insn != aftertail; insn = NEXT_INSN (insn))
{
@ -299,9 +299,9 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
if (f)
{
last = curr_bb = split_edge (f);
h = curr_bb->head;
curr_bb->head = head;
curr_bb->end = insn;
h = BB_HEAD (curr_bb);
BB_HEAD (curr_bb) = head;
BB_END (curr_bb) = insn;
/* Edge splitting created misplaced BASIC_BLOCK note, kill
it. */
delete_insn (h);
@ -324,9 +324,9 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
}
else
{
curr_bb->head = head;
curr_bb->end = insn;
add_missing_bbs (curr_bb->head, bb, curr_bb->prev_bb);
BB_HEAD (curr_bb) = head;
BB_END (curr_bb) = insn;
add_missing_bbs (BB_HEAD (curr_bb), bb, curr_bb->prev_bb);
}
note = GET_CODE (head) == CODE_LABEL ? NEXT_INSN (head) : head;
NOTE_BASIC_BLOCK (note) = curr_bb;
@ -337,7 +337,7 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
break;
}
}
add_missing_bbs (last->next_bb->head, bb, last);
add_missing_bbs (BB_HEAD (last->next_bb), bb, last);
return bb->prev_bb;
}
@ -442,7 +442,7 @@ add_deps_for_risky_insns (rtx head, rtx tail)
bb = bb->aux;
if (!bb)
break;
prev = bb->end;
prev = BB_END (bb);
}
}
/* FALLTHRU */
@ -585,15 +585,15 @@ schedule_ebbs (FILE *dump_file)
/* Schedule every region in the subroutine. */
FOR_EACH_BB (bb)
{
rtx head = bb->head;
rtx head = BB_HEAD (bb);
rtx tail;
for (;;)
{
edge e;
tail = bb->end;
tail = BB_END (bb);
if (bb->next_bb == EXIT_BLOCK_PTR
|| GET_CODE (bb->next_bb->head) == CODE_LABEL)
|| GET_CODE (BB_HEAD (bb->next_bb)) == CODE_LABEL)
break;
for (e = bb->succ; e; e = e->succ_next)
if ((e->flags & EDGE_FALLTHRU) != 0)

View File

@ -343,7 +343,7 @@ is_cfg_nonregular (void)
the cfg not well structured. */
/* Check for labels referred to other than by jumps. */
FOR_EACH_BB (b)
for (insn = b->head;; insn = NEXT_INSN (insn))
for (insn = BB_HEAD (b); ; insn = NEXT_INSN (insn))
{
code = GET_CODE (insn);
if (GET_RTX_CLASS (code) == 'i' && code != JUMP_INSN)
@ -357,7 +357,7 @@ is_cfg_nonregular (void)
return 1;
}
if (insn == b->end)
if (insn == BB_END (b))
break;
}
@ -558,8 +558,8 @@ static int
too_large (int block, int *num_bbs, int *num_insns)
{
(*num_bbs)++;
(*num_insns) += (INSN_LUID (BLOCK_END (block)) -
INSN_LUID (BLOCK_HEAD (block)));
(*num_insns) += (INSN_LUID (BB_END (BASIC_BLOCK (block))) -
INSN_LUID (BB_HEAD (BASIC_BLOCK (block))));
if ((*num_bbs > MAX_RGN_BLOCKS) || (*num_insns > MAX_RGN_INSNS))
return 1;
else
@ -852,8 +852,8 @@ find_rgns (struct edge_list *edge_list, dominance_info dom)
/* Estimate # insns, and count # blocks in the region. */
num_bbs = 1;
num_insns = (INSN_LUID (bb->end)
- INSN_LUID (bb->head));
num_insns = (INSN_LUID (BB_END (bb))
- INSN_LUID (BB_HEAD (bb)));
/* Find all loop latches (blocks with back edges to the loop
header) or all the leaf blocks in the cfg has no loops.
@ -1839,28 +1839,28 @@ can_schedule_ready_p (rtx insn)
/* Update source block boundaries. */
b1 = BLOCK_FOR_INSN (insn);
if (insn == b1->head && insn == b1->end)
if (insn == BB_HEAD (b1) && insn == BB_END (b1))
{
/* We moved all the insns in the basic block.
Emit a note after the last insn and update the
begin/end boundaries to point to the note. */
rtx note = emit_note_after (NOTE_INSN_DELETED, insn);
b1->head = note;
b1->end = note;
BB_HEAD (b1) = note;
BB_END (b1) = note;
}
else if (insn == b1->end)
else if (insn == BB_END (b1))
{
/* We took insns from the end of the basic block,
so update the end of block boundary so that it
points to the first insn we did not move. */
b1->end = PREV_INSN (insn);
BB_END (b1) = PREV_INSN (insn);
}
else if (insn == b1->head)
else if (insn == BB_HEAD (b1))
{
/* We took insns from the start of the basic block,
so update the start of block boundary so that
it points to the first insn we did not move. */
b1->head = NEXT_INSN (insn);
BB_HEAD (b1) = NEXT_INSN (insn);
}
}
else
@ -2516,10 +2516,10 @@ schedule_region (int rgn)
sched_rgn_n_insns += sched_n_insns;
/* Update target block boundaries. */
if (head == BLOCK_HEAD (b))
BLOCK_HEAD (b) = current_sched_info->head;
if (tail == BLOCK_END (b))
BLOCK_END (b) = current_sched_info->tail;
if (head == BB_HEAD (BASIC_BLOCK (b)))
BB_HEAD (BASIC_BLOCK (b)) = current_sched_info->head;
if (tail == BB_END (BASIC_BLOCK (b)))
BB_END (BASIC_BLOCK (b)) = current_sched_info->tail;
/* Clean up. */
if (current_nr_blocks > 1)
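The resource.c and sched-rgn.c hunks above only have a block number at hand, so the removed index-based accessors are written out as BB_HEAD (BASIC_BLOCK (b)) and BB_END (BASIC_BLOCK (b)) at each call site. If the old one-argument spelling were wanted back, it could be recovered with local wrappers; a purely hypothetical sketch, not part of this patch:

/* Hypothetical wrappers, NOT introduced by the patch: index-based
   access expressed through the new pointer-based accessors.  */
#define IDX_BLOCK_HEAD(B)  BB_HEAD (BASIC_BLOCK (B))
#define IDX_BLOCK_END(B)   BB_END (BASIC_BLOCK (B))

The patch instead spells out the composition wherever only an index is available, as the hunks above show.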

View File

@ -597,7 +597,7 @@ optimize_sibling_and_tail_recursive_calls (void)
/* Walk forwards through the last normal block and see if it
does nothing except fall into the exit block. */
for (insn = EXIT_BLOCK_PTR->prev_bb->head;
for (insn = BB_HEAD (EXIT_BLOCK_PTR->prev_bb);
insn;
insn = NEXT_INSN (insn))
{
@ -685,7 +685,7 @@ optimize_sibling_and_tail_recursive_calls (void)
&& call_block->succ->dest != alternate_exit)
/* If this call doesn't end the block, there are operations at
the end of the block which we must execute after returning. */
|| ! call_ends_block_p (insn, call_block->end))
|| ! call_ends_block_p (insn, BB_END (call_block)))
sibcall = 0, tailrecursion = 0;
/* Select a set of insns to implement the call and emit them.

View File

@ -85,7 +85,9 @@ count_insns (basic_block bb)
rtx insn;
int n = 0;
for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
for (insn = BB_HEAD (bb);
insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
if (active_insn_p (insn))
n++;
return n;