basic-block.h (struct basic_block_def): Reorder fields to eliminate interior padding.

* basic-block.h (struct basic_block_def): Reorder fields to
	eliminate interior padding.  Remove 'partition' field.
	(BB_DISABLE_SCHEDULE, BB_HOT_PARTITION, BB_COLD_PARTITION)
	(BB_UNPARTITIONED, BB_PARTITION, BB_SET_PARTITION)
	(BB_COPY_PARTITION): New macros.
	* bb-reorder.c, cfgcleanup.c, cfglayout.c, cfgrtl.c, ifcvt.c:
	Replace all references to the 'partition' field of a basic
	block with new macros.

	* insn-notes.def: Delete NOTE_INSN_DISABLE_SCHED_OF_BLOCK.
	* final.c (final_scan_insn): Don't handle it.
	* modulo-sched.c: Set BB_DISABLE_SCHEDULE flag on g->bb
	instead of emitting a NOTE_INSN_DISABLE_SCHED_OF_BLOCK note.
	* sched-rgn.c (sched_is_disabled_for_current_region_p):
	Look for a BB_DISABLE_SCHEDULE flag on the block instead of a note.

From-SVN: r86495
This commit is contained in:
Zack Weinberg 2004-08-24 16:46:32 +00:00
parent ae51017be0
commit 076c7ab896
11 changed files with 101 additions and 101 deletions

View File

@@ -1,3 +1,21 @@
2004-08-24 Zack Weinberg <zack@codesourcery.com>
* basic-block.h (struct basic_block_def): Reorder fields to
eliminate interior padding. Remove 'partition' field.
(BB_DISABLE_SCHEDULE, BB_HOT_PARTITION, BB_COLD_PARTITION)
(BB_UNPARTITIONED, BB_PARTITION, BB_SET_PARTITION)
(BB_COPY_PARTITION): New macros.
* bb-reorder.c, cfgcleanup.c, cfglayout.c, cfgrtl.c, ifcvt.c:
Replace all references to the 'partition' field of a basic
block with new macros.
* insn-notes.def: Delete NOTE_INSN_DISABLE_SCHED_OF_BLOCK.
* final.c (final_scan_insn): Don't handle it.
* modulo-sched.c: Set BB_DISABLE_SCHEDULE flag on g->bb
instead of emitting a NOTE_INSN_DISABLE_SCHED_OF_BLOCK note.
* sched-rgn.c (sched_is_disabled_for_current_region_p):
Look for a BB_DISABLE_SCHEDULE flag on the block instead of a note.
2004-08-24 Nathan Sidwell <nathan@codesourcery.com>
* c-decl.c (c_init_decl_processing): Adjust
@@ -20,7 +38,7 @@
* objc/objc-act.h (TREE_STATIC_TEMPLATE): Use TREE_PRIVATE.
2004-08-24 Richard Henderson <rth@redhat.com>
Andrew Pinski <apinski@apple.com>
Andrew Pinski <apinski@apple.com>
* gimplify.c (gimplify_array_ref_to_plus): Delete.
(gimplify_addr_expr): Do not call gimplify_array_ref_to_plus

View File

@@ -249,39 +249,36 @@ struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")
/* Auxiliary info specific to a pass. */
PTR GTY ((skip (""))) aux;
/* The index of this block. */
int index;
/* Previous and next blocks in the chain. */
struct basic_block_def *prev_bb;
struct basic_block_def *next_bb;
/* The loop depth of this block. */
int loop_depth;
/* Innermost loop containing the block. */
struct loop * GTY ((skip (""))) loop_father;
/* The dominance and postdominance information node. */
struct et_node * GTY ((skip (""))) dom[2];
/* Expected number of executions: calculated in profile.c. */
gcov_type count;
/* Expected frequency. Normalized to be in range 0 to BB_FREQ_MAX. */
int frequency;
/* Various flags. See BB_* below. */
int flags;
/* Which section block belongs in, when partitioning basic blocks. */
int partition;
/* Previous and next blocks in the chain. */
struct basic_block_def *prev_bb;
struct basic_block_def *next_bb;
/* The data used by basic block copying and reordering functions. */
struct reorder_block_def * GTY ((skip (""))) rbi;
/* Annotations used at the tree level. */
struct bb_ann_d *tree_annotations;
/* Expected number of executions: calculated in profile.c. */
gcov_type count;
/* The index of this block. */
int index;
/* The loop depth of this block. */
int loop_depth;
/* Expected frequency. Normalized to be in range 0 to BB_FREQ_MAX. */
int frequency;
/* Various flags. See BB_* below. */
int flags;
};
typedef struct basic_block_def *basic_block;
@@ -312,12 +309,18 @@ typedef struct reorder_block_def
#define BB_VISITED 8
#define BB_IRREDUCIBLE_LOOP 16
#define BB_SUPERBLOCK 32
#define BB_DISABLE_SCHEDULE 64
#define BB_HOT_PARTITION 128
#define BB_COLD_PARTITION 256
#define BB_UNPARTITIONED 0
/* Partitions, to be used when partitioning hot and cold basic blocks into
separate sections. */
#define UNPARTITIONED 0
#define HOT_PARTITION 1
#define COLD_PARTITION 2
#define BB_PARTITION(bb) ((bb)->flags & (BB_HOT_PARTITION|BB_COLD_PARTITION))
#define BB_SET_PARTITION(bb, part) ((bb)->flags |= (part))
#define BB_COPY_PARTITION(dstbb, srcbb) \
BB_SET_PARTITION (dstbb, BB_PARTITION (srcbb))
/* Number of basic blocks in the current function. */

View File

@@ -203,7 +203,7 @@ push_to_next_round_p (basic_block bb, int round, int number_of_rounds,
next_round_is_last = round + 1 == number_of_rounds - 1;
cold_block = (flag_reorder_blocks_and_partition
&& bb->partition == COLD_PARTITION);
&& BB_PARTITION (bb) == BB_COLD_PARTITION);
block_not_hot_enough = (bb->frequency < exec_th
|| bb->count < count_th
@@ -211,7 +211,7 @@ push_to_next_round_p (basic_block bb, int round, int number_of_rounds,
if (flag_reorder_blocks_and_partition
&& next_round_is_last
&& bb->partition != COLD_PARTITION)
&& BB_PARTITION (bb) != BB_COLD_PARTITION)
return false;
else if (there_exists_another_round
&& (cold_block || block_not_hot_enough))
@@ -513,7 +513,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
&& e->dest->rbi->visited != *n_traces)
continue;
if (e->dest->partition == COLD_PARTITION
if (BB_PARTITION (e->dest) == BB_COLD_PARTITION
&& round < last_round)
continue;
@@ -758,7 +758,7 @@ copy_bb (basic_block old_bb, edge e, basic_block bb, int trace)
basic_block new_bb;
new_bb = duplicate_block (old_bb, e);
new_bb->partition = old_bb->partition;
BB_COPY_PARTITION (new_bb, old_bb);
if (e->dest != new_bb)
abort ();
@@ -811,7 +811,8 @@ bb_to_key (basic_block bb)
/* Do not start in probably never executed blocks. */
if (bb->partition == COLD_PARTITION || probably_never_executed_bb_p (bb))
if (BB_PARTITION (bb) == BB_COLD_PARTITION
|| probably_never_executed_bb_p (bb))
return BB_FREQ_MAX;
/* Prefer blocks whose predecessor is an end of some trace
@@ -921,7 +922,7 @@ connect_traces (int n_traces, struct trace *traces)
if (flag_reorder_blocks_and_partition)
for (i = 0; i < n_traces; i++)
{
if (traces[i].first->partition == COLD_PARTITION)
if (BB_PARTITION (traces[i].first) == BB_COLD_PARTITION)
{
connected[i] = true;
cold_traces[i] = true;
@@ -1249,7 +1250,7 @@ add_unlikely_executed_notes (void)
/* Add the UNLIKELY_EXECUTED_NOTES to each cold basic block. */
FOR_EACH_BB (bb)
if (bb->partition == COLD_PARTITION)
if (BB_PARTITION (bb) == BB_COLD_PARTITION)
mark_bb_for_unlikely_executed_section (bb);
}
@@ -1272,10 +1273,10 @@ find_rarely_executed_basic_blocks_and_crossing_edges (edge *crossing_edges,
FOR_EACH_BB (bb)
{
if (probably_never_executed_bb_p (bb))
bb->partition = COLD_PARTITION;
BB_SET_PARTITION (bb, BB_COLD_PARTITION);
else
{
bb->partition = HOT_PARTITION;
BB_SET_PARTITION (bb, BB_HOT_PARTITION);
has_hot_blocks = true;
}
}
@@ -1288,7 +1289,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (edge *crossing_edges,
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
if (e->dest->index >= 0)
{
e->dest->partition = HOT_PARTITION;
BB_SET_PARTITION (e->dest, BB_HOT_PARTITION);
break;
}
@@ -1302,7 +1303,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (edge *crossing_edges,
{
if (e->src != ENTRY_BLOCK_PTR
&& e->dest != EXIT_BLOCK_PTR
&& e->src->partition != e->dest->partition)
&& BB_PARTITION (e->src) != BB_PARTITION (e->dest))
{
e->flags |= EDGE_CROSSING;
if (i == *max_idx)
@@ -1535,8 +1536,8 @@ fix_up_fall_thru_edges (void)
/* Make sure new fall-through bb is in same
partition as bb it's falling through from. */
new_bb->partition = cur_bb->partition;
BB_COPY_PARTITION (new_bb, cur_bb);
new_bb->succ->flags |= EDGE_CROSSING;
}
@@ -1735,8 +1736,7 @@ fix_crossing_conditional_branches (void)
/* Make sure new bb is in same partition as source
of conditional branch. */
new_bb->partition = cur_bb->partition;
BB_COPY_PARTITION (new_bb, cur_bb);
}
/* Make old jump branch to new bb. */

View File

@@ -153,7 +153,7 @@ try_simplify_condjump (basic_block cbranch_block)
and cold sections. */
if (flag_reorder_blocks_and_partition
&& (jump_block->partition != jump_dest_block->partition
&& (BB_PARTITION (jump_block) != BB_PARTITION (jump_dest_block)
|| (cbranch_jump_edge->flags & EDGE_CROSSING)))
return false;
@@ -673,7 +673,7 @@ merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
and cold sections. */
if (flag_reorder_blocks_and_partition
&& (a->partition != b->partition
&& (BB_PARTITION (a) != BB_PARTITION (b)
|| find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)))
return;
@@ -726,7 +726,7 @@ merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
if (flag_reorder_blocks_and_partition
&& (find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)
|| a->partition != b->partition))
|| BB_PARTITION (a) != BB_PARTITION (b)))
return;
real_b_end = BB_END (b);
@@ -792,7 +792,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
if (flag_reorder_blocks_and_partition
&& (find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
|| find_reg_note (BB_END (c), REG_CROSSING_JUMP, NULL_RTX)
|| b->partition != c->partition))
|| BB_PARTITION (b) != BB_PARTITION (c)))
return NULL;
@@ -1673,7 +1673,7 @@ try_crossjump_bb (int mode, basic_block bb)
and cold sections. */
if (flag_reorder_blocks_and_partition
&& (bb->pred->src->partition != bb->pred->pred_next->src->partition
&& (BB_PARTITION (bb->pred->src) != BB_PARTITION (bb->pred->pred_next->src)
|| (bb->pred->flags & EDGE_CROSSING)))
return false;

View File

@@ -795,11 +795,11 @@ fixup_reorder_chain (void)
/* Make sure new bb is tagged for correct section (same as
fall-thru source). */
e_fall->src->partition = bb->pred->src->partition;
BB_COPY_PARTITION (e_fall->src, bb->pred->src);
if (flag_reorder_blocks_and_partition
&& targetm.have_named_sections)
{
if (bb->pred->src->partition == COLD_PARTITION)
if (BB_PARTITION (bb->pred->src) == BB_COLD_PARTITION)
{
rtx new_note;
rtx note = BB_HEAD (e_fall->src);
@@ -1109,7 +1109,7 @@ cfg_layout_duplicate_bb (basic_block bb)
insn ? get_last_insn () : NULL,
EXIT_BLOCK_PTR->prev_bb);
new_bb->partition = bb->partition;
BB_COPY_PARTITION (new_bb, bb);
if (bb->rbi->header)
{
insn = bb->rbi->header;

View File

@@ -313,7 +313,7 @@ create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
link_block (bb, after);
BASIC_BLOCK (bb->index) = bb;
update_bb_for_insn (bb);
bb->partition = UNPARTITIONED;
BB_SET_PARTITION (bb, BB_UNPARTITIONED);
/* Tag the block so that we know it has been used when considering
other basic block notes. */
@@ -489,7 +489,7 @@ rtl_split_block (basic_block bb, void *insnp)
/* Create the new basic block. */
new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
new_bb->partition = bb->partition;
BB_COPY_PARTITION (new_bb, bb);
BB_END (bb) = insn;
/* Redirect the outgoing edges. */
@@ -619,24 +619,26 @@ rtl_merge_blocks (basic_block a, basic_block b)
static bool
rtl_can_merge_blocks (basic_block a,basic_block b)
{
bool partitions_ok = true;
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections. */
and cold sections.
??? If two basic blocks could otherwise be merged (which implies
that the jump between the two is unconditional), and one is in a
hot section and the other is in a cold section, surely that means
that one of the section choices is wrong. */
if (flag_reorder_blocks_and_partition
&& (find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)
|| find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
|| a->partition != b->partition))
partitions_ok = false;
|| BB_PARTITION (a) != BB_PARTITION (b)))
return false;
/* There must be exactly one edge in between the blocks. */
return (a->succ && !a->succ->succ_next && a->succ->dest == b
&& !b->pred->pred_next && a != b
/* Must be simple edge. */
&& !(a->succ->flags & EDGE_COMPLEX)
&& partitions_ok
&& a->next_bb == b
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
@@ -684,7 +686,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
if (flag_reorder_blocks_and_partition
&& (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
|| (src->partition != target->partition)))
|| BB_PARTITION (src) != BB_PARTITION (target)))
return NULL;
/* Verify that all targets will be TARGET. */
@@ -1094,11 +1096,11 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
/* Make sure new block ends up in correct hot/cold section. */
jump_block->partition = e->src->partition;
BB_COPY_PARTITION (jump_block, e->src);
if (flag_reorder_blocks_and_partition
&& targetm.have_named_sections)
{
if (e->src->partition == COLD_PARTITION)
if (BB_PARTITION (jump_block) == BB_COLD_PARTITION)
{
rtx bb_note, new_note;
for (bb_note = BB_HEAD (jump_block);
@@ -1110,7 +1112,6 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
bb_note);
NOTE_BASIC_BLOCK (new_note) = jump_block;
jump_block->partition = COLD_PARTITION;
}
if (JUMP_P (BB_END (jump_block))
&& !any_condjump_p (BB_END (jump_block))
@@ -1354,12 +1355,13 @@ rtl_split_edge (edge edge_in)
&& NOTE_LINE_NUMBER (before) == NOTE_INSN_LOOP_END)
before = NEXT_INSN (before);
bb = create_basic_block (before, NULL, edge_in->src);
bb->partition = edge_in->src->partition;
BB_COPY_PARTITION (bb, edge_in->src);
}
else
{
bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
bb->partition = edge_in->dest->partition;
/* ??? Why not edge_in->dest->prev_bb here? */
BB_COPY_PARTITION (bb, edge_in->dest);
}
/* ??? This info is likely going to be out of date very soon. */
@@ -1601,7 +1603,7 @@ commit_one_edge_insertion (edge e, int watch_calls)
if (flag_reorder_blocks_and_partition
&& targetm.have_named_sections
&& e->src != ENTRY_BLOCK_PTR
&& e->src->partition == COLD_PARTITION
&& BB_PARTITION (e->src) == BB_COLD_PARTITION
&& !(e->flags & EDGE_CROSSING))
{
rtx bb_note, new_note, cur_insn;
@@ -1987,7 +1989,7 @@ rtl_verify_flow_info_1 (void)
{
n_fallthru++, fallthru = e;
if ((e->flags & EDGE_CROSSING)
|| (e->src->partition != e->dest->partition
|| (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
&& e->src != ENTRY_BLOCK_PTR
&& e->dest != EXIT_BLOCK_PTR))
{
@@ -2667,24 +2669,26 @@ cfg_layout_delete_block (basic_block bb)
static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
bool partitions_ok = true;
/* If we are partitioning hot/cold basic blocks, we don't want to
mess up unconditional or indirect jumps that cross between hot
and cold sections. */
and cold sections.
??? If two basic blocks could otherwise be merged (which implies
that the jump between the two is unconditional), and one is in a
hot section and the other is in a cold section, surely that means
that one of the section choices is wrong. */
if (flag_reorder_blocks_and_partition
&& (find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)
|| find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
|| a->partition != b->partition))
partitions_ok = false;
|| BB_PARTITION (a) != BB_PARTITION (b)))
return false;
/* There must be exactly one edge in between the blocks. */
return (a->succ && !a->succ->succ_next && a->succ->dest == b
&& !b->pred->pred_next && a != b
/* Must be simple edge. */
&& !(a->succ->flags & EDGE_COMPLEX)
&& partitions_ok
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
we can't kill the edge. */

View File

@@ -1704,7 +1704,6 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
case NOTE_INSN_FUNCTION_END:
case NOTE_INSN_REPEATED_LINE_NUMBER:
case NOTE_INSN_EXPECTED_VALUE:
case NOTE_INSN_DISABLE_SCHED_OF_BLOCK:
break;
case NOTE_INSN_UNLIKELY_EXECUTED_CODE:

View File

@@ -2909,7 +2909,8 @@ find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
{
new_bb->index = then_bb_index;
BASIC_BLOCK (then_bb_index) = new_bb;
new_bb->partition = test_bb->partition;
/* ??? Should be then_bb? */
BB_COPY_PARTITION (new_bb, test_bb);
}
/* We've possibly created jump to next insn, cleanup_cfg will solve that
later. */

View File

@@ -98,11 +98,4 @@ INSN_NOTE (BASIC_BLOCK)
this a bit on the basic block structure. */
INSN_NOTE (UNLIKELY_EXECUTED_CODE)
/* Mark that a block shouldn't be scheduled. This is currently used
in modulo scheduling. Modulo scheduling adds this note to the
blocks of the modulo-scheduled loops to disable scheduling them in
the later traditional scheduling passes. FIXME: Make this a bit on
the basic block structure. */
INSN_NOTE (DISABLE_SCHED_OF_BLOCK)
#undef INSN_NOTE

View File

@@ -1112,8 +1112,7 @@ sms_schedule (FILE *dump_file)
/* Mark this loop as software pipelined so the later
scheduling passes doesn't touch it. */
if (! flag_resched_modulo_sched)
emit_note_before (NOTE_INSN_DISABLE_SCHED_OF_BLOCK,
g->closing_branch->insn);
g->bb->flags |= BB_DISABLE_SCHEDULE;
generate_reg_moves (ps);
if (dump_file)

View File

@@ -2338,28 +2338,11 @@ debug_dependencies (void)
static bool
sched_is_disabled_for_current_region_p (void)
{
rtx first_bb_insn, last_bb_insn, insn;
int bb;
for (bb = 0; bb < current_nr_blocks; bb++)
{
bool disable_sched = false;
/* Searching for NOTE_DISABLE_SCHED_OF_BLOCK note between the
start and end of the basic block. */
get_block_head_tail (BB_TO_BLOCK (bb), &first_bb_insn,
&last_bb_insn);
for (insn = last_bb_insn; insn != NULL && insn != first_bb_insn;
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE
&& (NOTE_LINE_NUMBER (insn)
== NOTE_INSN_DISABLE_SCHED_OF_BLOCK))
{
disable_sched = true;
break;
}
if (! disable_sched)
return false;
}
if (!(BASIC_BLOCK (BB_TO_BLOCK (bb))->flags & BB_DISABLE_SCHEDULE))
return false;
return true;
}