* cfg.c, cfganal.c, cfgbuild.c: Reformatting and minor cleanups.

From-SVN: r48270
Author: Richard Kenner (committed by Richard Kenner)
Date: 2001-12-22 15:51:07 +00:00
Parent: bfdade77da
Commit: 4891442b1f
4 changed files with 192 additions and 166 deletions

ChangeLog

@@ -1,6 +1,7 @@
Sat Dec 22 08:59:50 2001 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* predict.c: Reformatting and minor cleanups.
* cfg.c, cfganal.c, cfgbuild.c: Likewise.
* expr.c (expand_expr, case ADDR_EXPR): Handle taking address of
SAVE_EXPR.
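
The recurring cleanup across the cfg.c hunks below is dropping redundant parentheses from sizeof applied to an expression, since sizeof requires parentheses only around type names. A minimal, hypothetical sketch of the idiom (struct edge_def here is a stand-in, not GCC's real type):

  #include <stdlib.h>
  #include <string.h>

  /* Illustrative stand-in for GCC's edge structure.  */
  struct edge_def
  {
    struct edge_def *succ_next;
    int flags;
  };

  int
  main (void)
  {
    struct edge_def *e = malloc (sizeof *e);   /* expression operand: no parens */

    if (e)
      {
        memset (e, 0, sizeof *e);              /* the pattern the commit adopts */
        free (e);
      }

    /* A type operand still needs them, e.g. sizeof (struct edge_def).  */
    return 0;
  }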

cfg.c

@@ -152,7 +152,7 @@ free_edge (e)
edge e;
{
n_edges--;
memset (e, 0, sizeof (*e));
memset (e, 0, sizeof *e);
e->succ_next = first_deleted_edge;
first_deleted_edge = e;
}
@@ -177,6 +177,7 @@ clear_edges ()
free_edge (e);
e = next;
}
bb->succ = NULL;
bb->pred = NULL;
}
@@ -189,6 +190,7 @@ clear_edges ()
free_edge (e);
e = next;
}
EXIT_BLOCK_PTR->pred = NULL;
ENTRY_BLOCK_PTR->succ = NULL;
@@ -211,8 +213,8 @@ alloc_block ()
}
else
{
bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
memset (bb, 0, sizeof (*bb));
bb = (basic_block) obstack_alloc (&flow_obstack, sizeof *bb);
memset (bb, 0, sizeof *bb);
}
return bb;
}
@@ -233,7 +235,7 @@ expunge_block (b)
}
/* Invalidate data to make bughunting easier. */
memset (b, 0, sizeof (*b));
memset (b, 0, sizeof *b);
b->index = -3;
basic_block_info->num_elements--;
n_basic_blocks--;
@@ -253,11 +255,10 @@ cached_make_edge (edge_cache, src, dst, flags)
int use_edge_cache;
edge e;
/* Don't bother with edge cache for ENTRY or EXIT; there aren't that
many edges to them, and we didn't allocate memory for it. */
/* Don't bother with edge cache for ENTRY or EXIT, if there aren't that
many edges to them, or we didn't allocate memory for it. */
use_edge_cache = (edge_cache
&& src != ENTRY_BLOCK_PTR
&& dst != EXIT_BLOCK_PTR);
&& src != ENTRY_BLOCK_PTR && dst != EXIT_BLOCK_PTR);
/* Make sure we don't add duplicate edges. */
switch (use_edge_cache)
@@ -289,8 +290,8 @@ cached_make_edge (edge_cache, src, dst, flags)
}
else
{
e = (edge) obstack_alloc (&flow_obstack, sizeof (*e));
memset (e, 0, sizeof (*e));
e = (edge) obstack_alloc (&flow_obstack, sizeof *e);
memset (e, 0, sizeof *e);
}
n_edges++;
@@ -345,6 +346,7 @@ remove_edge (e)
edge last_succ = NULL;
edge tmp;
basic_block src, dest;
src = e->src;
dest = e->dest;
for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
@@ -398,10 +400,12 @@ redirect_edge_succ_nodup (e, new_succ)
basic_block new_succ;
{
edge s;
/* Check whether the edge is already present. */
for (s = e->src->succ; s; s = s->succ_next)
if (s->dest == new_succ && s != e)
break;
if (s)
{
s->flags |= e->flags;
@@ -412,6 +416,7 @@ redirect_edge_succ_nodup (e, new_succ)
}
else
redirect_edge_succ (e, new_succ);
return e;
}
@@ -427,6 +432,7 @@ redirect_edge_pred (e, new_pred)
/* Disconnect the edge from the old predecessor block. */
for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
continue;
*pe = (*pe)->succ_next;
/* Reconnect the edge to the new predecessor block. */
@@ -447,6 +453,7 @@ dump_flow_info (file)
if (REG_N_REFS (i))
{
enum reg_class class, altclass;
fprintf (file, "\nRegister %d used %d times across %d insns",
i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
if (REG_BASIC_BLOCK (i) >= 0)
@@ -464,6 +471,7 @@ dump_flow_info (file)
fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
class = reg_preferred_class (i);
altclass = reg_alternate_class (i);
if (class != GENERAL_REGS || altclass != ALL_REGS)
@@ -477,6 +485,7 @@ dump_flow_info (file)
reg_class_names[(int) class],
reg_class_names[(int) altclass]);
}
if (REG_POINTER (regno_reg_rtx[i]))
fprintf (file, "; pointer");
fprintf (file, ".\n");
@@ -488,9 +497,10 @@ dump_flow_info (file)
basic_block bb = BASIC_BLOCK (i);
edge e;
fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
fprintf (file, "\nBasic block %d: first insn %d, last %d, ",
i, INSN_UID (bb->head), INSN_UID (bb->end));
fprintf (file, "loop_depth %d, count ", bb->loop_depth);
fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
fprintf (file, ", freq %i.\n", bb->frequency);
fprintf (file, "Predecessors: ");
@@ -540,19 +550,17 @@ dump_edge_info (file, e, do_succ)
if (e->count)
{
fprintf (file, " count:");
fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
}
if (e->flags)
{
static const char * const bitnames[] = {
"fallthru", "ab", "abcall", "eh", "fake", "dfs_back"
};
static const char * const bitnames[]
= {"fallthru", "ab", "abcall", "eh", "fake", "dfs_back"};
int comma = 0;
int i, flags = e->flags;
fputc (' ', file);
fputc ('(', file);
fputs (" (", file);
for (i = 0; flags; i++)
if (flags & (1 << i))
{
@@ -566,11 +574,13 @@ dump_edge_info (file, e, do_succ)
fprintf (file, "%d", i);
comma = 1;
}
fputc (')', file);
}
}
/* Simple routines to easily allocate AUX fields of basic blocks. */
static struct obstack block_aux_obstack;
static void *first_block_aux_obj = 0;
static struct obstack edge_aux_obstack;
@@ -605,6 +615,7 @@ alloc_aux_for_blocks (size)
gcc_obstack_init (&block_aux_obstack);
initialized = 1;
}
/* Check whether AUX data are still allocated. */
else if (first_block_aux_obj)
abort ();
@@ -612,8 +623,10 @@ alloc_aux_for_blocks (size)
if (size)
{
int i;
for (i = 0; i < n_basic_blocks; i++)
alloc_aux_for_block (BASIC_BLOCK (i), size);
alloc_aux_for_block (ENTRY_BLOCK_PTR, size);
alloc_aux_for_block (EXIT_BLOCK_PTR, size);
}
@@ -628,6 +641,7 @@ clear_aux_for_blocks ()
for (i = 0; i < n_basic_blocks; i++)
BASIC_BLOCK (i)->aux = NULL;
ENTRY_BLOCK_PTR->aux = NULL;
EXIT_BLOCK_PTR->aux = NULL;
}
@@ -675,9 +689,11 @@ alloc_aux_for_edges (size)
gcc_obstack_init (&edge_aux_obstack);
initialized = 1;
}
/* Check whether AUX data are still allocated. */
else if (first_edge_aux_obj)
abort ();
first_edge_aux_obj = (char *) obstack_alloc (&edge_aux_obstack, 0);
if (size)
{
@@ -691,6 +707,7 @@ alloc_aux_for_edges (size)
bb = BASIC_BLOCK (i);
else
bb = ENTRY_BLOCK_PTR;
for (e = bb->succ; e; e = e->succ_next)
alloc_aux_for_edge (e, size);
}
@@ -713,6 +730,7 @@ clear_aux_for_edges ()
bb = BASIC_BLOCK (i);
else
bb = ENTRY_BLOCK_PTR;
for (e = bb->succ; e; e = e->succ_next)
e->aux = NULL;
}

cfganal.c

@@ -56,27 +56,28 @@ static bool need_fake_edge_p PARAMS ((rtx));
/* Return true if the block has no effect and only forwards control flow to
its single destination. */
bool
forwarder_block_p (bb)
basic_block bb;
{
rtx insn = bb->head;
rtx insn;
if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
|| !bb->succ || bb->succ->succ_next)
return false;
while (insn != bb->end)
{
if (INSN_P (insn) && active_insn_p (insn))
return false;
insn = NEXT_INSN (insn);
}
for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
if (INSN_P (insn) && active_insn_p (insn))
return false;
return (!INSN_P (insn)
|| (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
|| !active_insn_p (insn));
}
/* Return nonzero if we can reach target from src by falling through. */
bool
can_fallthru (src, target)
basic_block src, target;
@@ -86,6 +87,7 @@ can_fallthru (src, target)
if (src->index + 1 == target->index && !active_insn_p (insn2))
insn2 = next_active_insn (insn2);
/* ??? Later we may add code to move jump tables offline. */
return next_active_insn (insn) == insn2;
}
@@ -148,7 +150,6 @@ mark_dfs_back_edges ()
SET_BIT (visited, dest->index);
pre[dest->index] = prenum++;
if (dest->succ)
{
/* Since the DEST node has been visited for the first
@@ -235,17 +236,17 @@ flow_call_edges_add (blocks)
{
for (i = 0; i < n_basic_blocks; i++)
bbs[bb_num++] = BASIC_BLOCK (i);
check_last_block = true;
}
else
{
EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
{
bbs[bb_num++] = BASIC_BLOCK (i);
if (i == n_basic_blocks - 1)
check_last_block = true;
});
}
EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
{
bbs[bb_num++] = BASIC_BLOCK (i);
if (i == n_basic_blocks - 1)
check_last_block = true;
});
/* In the last basic block, before epilogue generation, there will be
a fallthru edge to EXIT. Special care is required if the last insn
@@ -263,14 +264,15 @@ flow_call_edges_add (blocks)
&& need_fake_edge_p (BASIC_BLOCK (n_basic_blocks - 1)->end))
{
edge e;
for (e = BASIC_BLOCK (n_basic_blocks - 1)->succ; e; e = e->succ_next)
if (e->dest == EXIT_BLOCK_PTR)
break;
insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
commit_edge_insertions ();
}
/* Now add fake edges to the function exit for any non constant
calls since there is no way that we can determine if they will
return or not... */
@@ -289,9 +291,10 @@ flow_call_edges_add (blocks)
edge e;
/* The above condition should be enough to verify that there is
no edge to the exit block in CFG already. Calling make_edge in
such case would make us to mark that edge as fake and remove it
later. */
no edge to the exit block in CFG already. Calling make_edge
in such case would make us to mark that edge as fake and
remove it later. */
#ifdef ENABLE_CHECKING
if (insn == bb->end)
for (e = bb->succ; e; e = e->succ_next)
@@ -307,6 +310,7 @@ flow_call_edges_add (blocks)
make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
if (insn == bb->head)
break;
}
@@ -318,6 +322,7 @@ flow_call_edges_add (blocks)
free (bbs);
return blocks_split;
}
/* Find unreachable blocks. An unreachable block will have 0 in
the reachable bit in block->flags. A non-zero value indicates the
block is reachable. */
@@ -401,6 +406,7 @@ create_edge_list ()
for (e = bb->succ; e; e = e->succ_next)
num_edges++;
}
/* Don't forget successors of the entry block. */
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
num_edges++;
@@ -414,10 +420,7 @@ create_edge_list ()
/* Follow successors of the entry block, and register these edges. */
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
{
elist->index_to_edge[num_edges] = e;
num_edges++;
}
elist->index_to_edge[num_edges++] = e;
for (x = 0; x < n_basic_blocks; x++)
{
@@ -425,11 +428,9 @@ create_edge_list ()
/* Follow all successors of blocks, and register these edges. */
for (e = bb->succ; e; e = e->succ_next)
{
elist->index_to_edge[num_edges] = e;
num_edges++;
}
elist->index_to_edge[num_edges++] = e;
}
return elist;
}
@@ -454,6 +455,7 @@ print_edge_list (f, elist)
struct edge_list *elist;
{
int x;
fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
elist->num_blocks - 2, elist->num_edges);
@@ -498,6 +500,7 @@ verify_edge_list (f, elist)
fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
continue;
}
if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
fprintf (f, "*p* Pred for index %d should be %d not %d\n",
index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
@@ -506,6 +509,7 @@ verify_edge_list (f, elist)
index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
}
}
for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
{
pred = e->src->index;
@@ -516,6 +520,7 @@ verify_edge_list (f, elist)
fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
continue;
}
if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
fprintf (f, "*p* Pred for index %d should be %d not %d\n",
index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
@@ -523,6 +528,7 @@ verify_edge_list (f, elist)
fprintf (f, "*p* Succ for index %d should be %d not %d\n",
index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
}
/* We've verified that all the edges are in the list, now let's make sure
there are no spurious edges in the list. */
@@ -531,7 +537,6 @@ verify_edge_list (f, elist)
{
basic_block p = BASIC_BLOCK (pred);
basic_block s = BASIC_BLOCK (succ);
int found_edge = 0;
for (e = p->succ; e; e = e->succ_next)
@@ -540,12 +545,14 @@ verify_edge_list (f, elist)
found_edge = 1;
break;
}
for (e = s->pred; e; e = e->pred_next)
if (e->src == p)
{
found_edge = 1;
break;
}
if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
== EDGE_INDEX_NO_EDGE && found_edge != 0)
fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
@@ -556,11 +563,11 @@ verify_edge_list (f, elist)
pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
BASIC_BLOCK (succ)));
}
for (succ = 0; succ < n_basic_blocks; succ++)
{
basic_block p = ENTRY_BLOCK_PTR;
basic_block s = BASIC_BLOCK (succ);
int found_edge = 0;
for (e = p->succ; e; e = e->succ_next)
@@ -569,12 +576,14 @@ verify_edge_list (f, elist)
found_edge = 1;
break;
}
for (e = s->pred; e; e = e->pred_next)
if (e->src == p)
{
found_edge = 1;
break;
}
if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
== EDGE_INDEX_NO_EDGE && found_edge != 0)
fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
@@ -585,11 +594,11 @@ verify_edge_list (f, elist)
succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
BASIC_BLOCK (succ)));
}
for (pred = 0; pred < n_basic_blocks; pred++)
{
basic_block p = BASIC_BLOCK (pred);
basic_block s = EXIT_BLOCK_PTR;
int found_edge = 0;
for (e = p->succ; e; e = e->succ_next)
@@ -598,12 +607,14 @@ verify_edge_list (f, elist)
found_edge = 1;
break;
}
for (e = s->pred; e; e = e->pred_next)
if (e->src == p)
{
found_edge = 1;
break;
}
if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
== EDGE_INDEX_NO_EDGE && found_edge != 0)
fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
@@ -625,12 +636,12 @@ find_edge_index (edge_list, pred, succ)
basic_block pred, succ;
{
int x;
for (x = 0; x < NUM_EDGES (edge_list); x++)
{
if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
return x;
}
if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
return x;
return (EDGE_INDEX_NO_EDGE);
}
@@ -670,6 +681,7 @@ flow_edge_list_print (str, edge_list, num_edges, file)
for (i = 0; i < num_edges; i++)
fprintf (file, "%d->%d ", edge_list[i]->src->index,
edge_list[i]->dest->index);
fputs ("}\n", file);
}
@@ -683,9 +695,11 @@ remove_fake_successors (bb)
basic_block bb;
{
edge e;
for (e = bb->succ; e;)
{
edge tmp = e;
e = e->succ_next;
if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
remove_edge (tmp);
@@ -737,11 +751,10 @@ void
connect_infinite_loops_to_exit ()
{
basic_block unvisited_block;
struct depth_first_search_dsS dfs_ds;
/* Perform depth-first search in the reverse graph to find nodes
reachable from the exit block. */
struct depth_first_search_dsS dfs_ds;
flow_dfs_compute_reverse_init (&dfs_ds);
flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
@@ -751,16 +764,17 @@ connect_infinite_loops_to_exit ()
unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
if (!unvisited_block)
break;
make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
}
flow_dfs_compute_reverse_finish (&dfs_ds);
return;
}
/* Compute reverse top sort order */
void
flow_reverse_top_sort_order_compute (rts_order)
int *rts_order;
@@ -801,11 +815,9 @@ flow_reverse_top_sort_order_compute (rts_order)
SET_BIT (visited, dest->index);
if (dest->succ)
{
/* Since the DEST node has been visited for the first
time, check its successors. */
stack[sp++] = dest->succ;
}
/* Since the DEST node has been visited for the first
time, check its successors. */
stack[sp++] = dest->succ;
else
rts_order[postnum++] = dest->index;
}
@@ -879,28 +891,21 @@ flow_depth_first_order_compute (dfs_order, rc_order)
dfsnum++;
if (dest->succ)
{
/* Since the DEST node has been visited for the first
time, check its successors. */
stack[sp++] = dest->succ;
}
else
{
/* There are no successors for the DEST node so assign
its reverse completion number. */
if (rc_order)
rc_order[rcnum--] = dest->index;
}
/* Since the DEST node has been visited for the first
time, check its successors. */
stack[sp++] = dest->succ;
else if (rc_order)
/* There are no successors for the DEST node so assign
its reverse completion number. */
rc_order[rcnum--] = dest->index;
}
else
{
if (! e->succ_next && src != ENTRY_BLOCK_PTR)
{
/* There are no more successors for the SRC node
so assign its reverse completion number. */
if (rc_order)
rc_order[rcnum--] = src->index;
}
if (! e->succ_next && src != ENTRY_BLOCK_PTR
&& rc_order)
/* There are no more successors for the SRC node
so assign its reverse completion number. */
rc_order[rcnum--] = src->index;
if (e->succ_next)
stack[sp - 1] = e->succ_next;
@@ -920,10 +925,12 @@ flow_depth_first_order_compute (dfs_order, rc_order)
/* There are some nodes left in the CFG that are unreachable. */
if (dfsnum < n_basic_blocks)
abort ();
return dfsnum;
}
struct dfst_node {
struct dfst_node
{
unsigned nnodes;
struct dfst_node **node;
struct dfst_node *up;
@@ -958,17 +965,20 @@ flow_preorder_transversal_compute (pot_order)
sp = 0;
/* Allocate the tree. */
dfst
= (struct dfst_node *) xcalloc (n_basic_blocks, sizeof (struct dfst_node));
dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
sizeof (struct dfst_node));
for (i = 0; i < n_basic_blocks; i++)
{
max_successors = 0;
for (e = BASIC_BLOCK (i)->succ; e; e = e->succ_next)
max_successors++;
dfst[i].node = max_successors ? (struct dfst_node **)
xcalloc (max_successors,
sizeof (struct dfst_node *))
: NULL;
dfst[i].node
= (max_successors
? (struct dfst_node **) xcalloc (max_successors,
sizeof (struct dfst_node *))
: NULL);
}
/* Allocate bitmap to track nodes that have been visited. */
@@ -1005,19 +1015,15 @@ flow_preorder_transversal_compute (pot_order)
}
if (dest->succ)
{
/* Since the DEST node has been visited for the first
time, check its successors. */
stack[sp++] = dest->succ;
}
/* Since the DEST node has been visited for the first
time, check its successors. */
stack[sp++] = dest->succ;
}
else if (e->succ_next)
stack[sp - 1] = e->succ_next;
else
{
if (e->succ_next)
stack[sp - 1] = e->succ_next;
else
sp--;
}
sp--;
}
free (stack);
@@ -1046,6 +1052,7 @@ flow_preorder_transversal_compute (pot_order)
for (i = 0; i < n_basic_blocks; i++)
if (dfst[i].node)
free (dfst[i].node);
free (dfst);
}
@@ -1084,9 +1091,8 @@ flow_dfs_compute_reverse_init (data)
depth_first_search_ds data;
{
/* Allocate stack for back-tracking up CFG. */
data->stack =
(basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
* sizeof (basic_block));
data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
* sizeof (basic_block));
data->sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
@@ -1109,13 +1115,12 @@ flow_dfs_compute_reverse_add_bb (data, bb)
{
data->stack[data->sp++] = bb;
SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
return;
}
/* Continue the depth-first search through the reverse graph starting
with the block at the stack's top and ending when the stack is
empty. Visited nodes are marked. Returns an unvisited basic
block, or NULL if there is none available. */
/* Continue the depth-first search through the reverse graph starting with the
block at the stack's top and ending when the stack is empty. Visited nodes
are marked. Returns an unvisited basic block, or NULL if there is none
available. */
static basic_block
flow_dfs_compute_reverse_execute (data)
@@ -1128,6 +1133,7 @@ flow_dfs_compute_reverse_execute (data)
while (data->sp > 0)
{
bb = data->stack[--data->sp];
/* Perform depth-first search on adjacent vertices. */
for (e = bb->pred; e; e = e->pred_next)
if (!TEST_BIT (data->visited_blocks,
@@ -1136,9 +1142,10 @@ flow_dfs_compute_reverse_execute (data)
}
/* Determine if there are unvisited basic blocks. */
for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0; )
if (!TEST_BIT (data->visited_blocks, i))
return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
return NULL;
}
@@ -1151,5 +1158,4 @@ flow_dfs_compute_reverse_finish (data)
{
free (data->stack);
sbitmap_free (data->visited_blocks);
return;
}
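
The cfganal.c routines above (mark_dfs_back_edges, flow_reverse_top_sort_order_compute, flow_depth_first_order_compute, flow_dfs_compute_reverse_execute) share one skeleton: an iterative depth-first search that keeps an explicit stack of "current block plus position in its successor list" instead of recursing, advancing with the equivalent of stack[sp - 1] = e->succ_next. A small, self-contained sketch of that skeleton over a plain adjacency list (graph and names are illustrative, not GCC's):

  #include <stdio.h>

  #define N 5

  /* adj[b] is a -1 terminated successor list for block b.  */
  static const int adj[N][N] = {
    {1, 2, -1}, {3, -1}, {3, -1}, {4, -1}, {-1}
  };

  int
  main (void)
  {
    int stack_bb[N];            /* block whose successors we are scanning */
    int stack_ix[N];            /* how far into its successor list we are */
    char visited[N] = { 0 };
    int sp = 0;

    stack_bb[sp] = 0;
    stack_ix[sp] = 0;
    sp++;
    visited[0] = 1;
    printf ("preorder: 0");

    while (sp)
      {
        int src = stack_bb[sp - 1];
        int dest = adj[src][stack_ix[sp - 1]];

        if (dest < 0)
          sp--;                        /* successor list exhausted: pop */
        else
          {
            stack_ix[sp - 1]++;        /* like stack[sp - 1] = e->succ_next */
            if (!visited[dest])
              {
                /* First visit: number it and descend into its successors,
                   as in "SET_BIT (visited, ...); stack[sp++] = dest->succ".  */
                visited[dest] = 1;
                printf (" %d", dest);
                stack_bb[sp] = dest;
                stack_ix[sp] = 0;
                sp++;
              }
          }
      }

    printf ("\n");                     /* prints: preorder: 0 1 3 4 2 */
    return 0;
  }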

cfgbuild.c

@@ -30,8 +30,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
- CFG construction
find_basic_blocks
- Local CFG construction
find_sub_basic_blocks
*/
find_sub_basic_blocks */
#include "config.h"
#include "system.h"
@@ -46,8 +45,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "except.h"
#include "toplev.h"
#include "timevar.h"
#include "obstack.h"
static int count_basic_blocks PARAMS ((rtx));
static void find_basic_blocks_1 PARAMS ((rtx));
static rtx find_label_refs PARAMS ((rtx, rtx));
@@ -59,7 +58,7 @@ static void find_bb_boundaries PARAMS ((basic_block));
static void compute_outgoing_frequencies PARAMS ((basic_block));
static bool inside_basic_block_p PARAMS ((rtx));
static bool control_flow_insn_p PARAMS ((rtx));
/* Return true if insn is something that should be contained inside basic
block. */
@@ -71,18 +70,14 @@ inside_basic_block_p (insn)
{
case CODE_LABEL:
/* Avoid creating a basic block for jumptables. */
if (NEXT_INSN (insn)
&& GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
return false;
return true;
return (NEXT_INSN (insn) == 0
|| GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
|| (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
&& GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
case JUMP_INSN:
if (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
return false;
return true;
return (GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
case CALL_INSN:
case INSN:
@@ -97,14 +92,15 @@ inside_basic_block_p (insn)
}
}
/* Return true if INSN may cause control flow transfer, so
it should be last in the basic block. */
/* Return true if INSN may cause control flow transfer, so it should be last in
the basic block. */
static bool
control_flow_insn_p (insn)
rtx insn;
{
rtx note;
switch (GET_CODE (insn))
{
case NOTE:
@@ -113,23 +109,20 @@ control_flow_insn_p (insn)
case JUMP_INSN:
/* Jump insn always causes control transfer except for tablejumps. */
if (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
return false;
return true;
return (GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);
case CALL_INSN:
/* Call insn may return to the nonlocal goto handler. */
if (nonlocal_goto_handler_labels
&& ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
|| INTVAL (XEXP (note, 0)) >= 0))
return true;
/* Or may trap. */
return can_throw_internal (insn);
return ((nonlocal_goto_handler_labels
&& (0 == (note = find_reg_note (insn, REG_EH_REGION,
NULL_RTX))
|| INTVAL (XEXP (note, 0)) >= 0))
/* Or may trap. */
|| can_throw_internal (insn));
case INSN:
return (flag_non_call_exceptions
&& can_throw_internal (insn));
return (flag_non_call_exceptions && can_throw_internal (insn));
case BARRIER:
/* It is nonsense to reach a barrier when looking for the
@@ -156,7 +149,6 @@ count_basic_blocks (f)
{
/* Code labels and barriers cause the current basic block to be
terminated at previous real insn. */
if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
&& saw_insn)
count++, saw_insn = false;
@@ -169,6 +161,7 @@ count_basic_blocks (f)
if (saw_insn && control_flow_insn_p (insn))
count++, saw_insn = false;
}
if (saw_insn)
count++;
@@ -185,6 +178,7 @@ count_basic_blocks (f)
/* Scan a list of insns for labels referred to other than by jumps.
This is used to scan the alternatives of a call placeholder. */
static rtx
find_label_refs (f, lvl)
rtx f;
@@ -263,7 +257,7 @@ make_eh_edge (edge_cache, src, insn)
basic_block src;
rtx insn;
{
int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
rtx handlers, i;
handlers = reachable_handlers (insn);
@@ -274,6 +268,7 @@ make_eh_edge (edge_cache, src, insn)
free_INSN_LIST_list (&handlers);
}
/* Identify the edges between basic blocks MIN to MAX.
NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
@@ -305,6 +300,7 @@ make_edges (label_value_list, min, max, update_p)
for (i = min; i <= max; ++i)
{
edge e;
for (e = BASIC_BLOCK (i)->succ; e ; e = e->succ_next)
if (e->dest != EXIT_BLOCK_PTR)
SET_BIT (edge_cache[i], e->dest->index);
@@ -313,7 +309,8 @@ make_edges (label_value_list, min, max, update_p)
/* By nature of the way these get numbered, block 0 is always the entry. */
if (min == 0)
cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
cached_make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0),
EDGE_FALLTHRU);
for (i = min; i <= max; ++i)
{
@@ -322,8 +319,7 @@ make_edges (label_value_list, min, max, update_p)
enum rtx_code code;
int force_fallthru = 0;
if (GET_CODE (bb->head) == CODE_LABEL
&& LABEL_ALTERNATE_NAME (bb->head))
if (GET_CODE (bb->head) == CODE_LABEL && LABEL_ALTERNATE_NAME (bb->head))
cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
/* Examine the last instruction of the block, and discover the
@@ -408,11 +404,10 @@ make_edges (label_value_list, min, max, update_p)
}
}
/* If this is a sibling call insn, then this is in effect a
combined call and return, and so we need an edge to the
exit block. No need to worry about EH edges, since we
wouldn't have created the sibling call in the first place. */
/* If this is a sibling call insn, then this is in effect a combined call
and return, and so we need an edge to the exit block. No need to
worry about EH edges, since we wouldn't have created the sibling call
in the first place. */
if (code == CALL_INSN && SIBLING_CALL_P (insn))
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
@@ -420,9 +415,7 @@ make_edges (label_value_list, min, max, update_p)
/* If this is a CALL_INSN, then mark it as reaching the active EH
handler for this CALL_INSN. If we're handling non-call
exceptions then any insn can reach any of the active handlers.
Also mark the CALL_INSN as reaching any nonlocal goto handler. */
else if (code == CALL_INSN || flag_non_call_exceptions)
{
/* Add any appropriate EH edges. */
@@ -432,14 +425,15 @@ make_edges (label_value_list, min, max, update_p)
{
/* ??? This could be made smarter: in some cases it's possible
to tell that certain calls will not do a nonlocal goto.
For example, if the nested functions that do the nonlocal
gotos do not have their addresses taken, then only calls to
those functions or to other nested functions that use them
could possibly do nonlocal gotos. */
/* We do know that a REG_EH_REGION note with a value less
than 0 is guaranteed not to perform a non-local goto. */
rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note || INTVAL (XEXP (note, 0)) >= 0)
for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
make_label_edge (edge_cache, bb, XEXP (x, 0),
@@ -457,7 +451,8 @@ make_edges (label_value_list, min, max, update_p)
if (GET_CODE (tmp) == NOTE)
tmp = next_nonnote_insn (tmp);
if (force_fallthru || insn == tmp)
cached_make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
cached_make_edge (edge_cache, bb, BASIC_BLOCK (i + 1),
EDGE_FALLTHRU);
}
}
@@ -501,12 +496,14 @@ find_basic_blocks_1 (f)
head = end = NULL_RTX;
bb_note = NULL_RTX;
}
if (inside_basic_block_p (insn))
{
if (head == NULL_RTX)
head = insn;
end = insn;
}
if (head && control_flow_insn_p (insn))
{
create_basic_block_structure (i++, head, end, bb_note);
@@ -676,14 +673,10 @@ find_basic_blocks (f, nregs, file)
}
/* State of basic block as seen by find_sub_basic_blocks. */
enum state
{
BLOCK_NEW = 0,
BLOCK_ORIGINAL,
BLOCK_TO_SPLIT
};
#define STATE(bb) (enum state)(size_t)(bb)->aux
#define SET_STATE(bb, state) (bb)->aux = (void *) (size_t) (state)
enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};
#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))
/* Scan basic block BB for possible BB boundaries inside the block
and create new basic blocks in the progress. */
@@ -714,12 +707,14 @@ find_bb_boundaries (bb)
fallthru = split_block (bb, PREV_INSN (insn));
if (flow_transfer_insn)
bb->end = flow_transfer_insn;
bb = fallthru->dest;
remove_edge (fallthru);
flow_transfer_insn = NULL_RTX;
if (LABEL_ALTERNATE_NAME (insn))
make_edge (ENTRY_BLOCK_PTR, bb, 0);
}
/* In case we've previously seen an insn that effects a control
flow transfer, split the block. */
if (flow_transfer_insn && inside_basic_block_p (insn))
@@ -730,6 +725,7 @@ find_bb_boundaries (bb)
remove_edge (fallthru);
flow_transfer_insn = NULL_RTX;
}
if (control_flow_insn_p (insn))
flow_transfer_insn = insn;
if (insn == end)
@@ -757,6 +753,7 @@ compute_outgoing_frequencies (b)
basic_block b;
{
edge e, f;
if (b->succ && b->succ->succ_next && !b->succ->succ_next->succ_next)
{
rtx note = find_reg_note (b->end, REG_BR_PROB, NULL);
@@ -764,8 +761,10 @@ compute_outgoing_frequencies (b)
if (!note)
return;
probability = INTVAL (XEXP (find_reg_note (b->end,
REG_BR_PROB, NULL), 0));
REG_BR_PROB, NULL),
0));
e = BRANCH_EDGE (b);
e->probability = probability;
e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
@@ -774,6 +773,7 @@ compute_outgoing_frequencies (b)
f->probability = REG_BR_PROB_BASE - probability;
f->count = b->count - e->count;
}
if (b->succ && !b->succ->succ_next)
{
e = b->succ;
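
In compute_outgoing_frequencies above, a two-successor block's count is split between the branch and fallthru edges using a probability stored as a fixed-point fraction of REG_BR_PROB_BASE (10000 in GCC of this era); the + REG_BR_PROB_BASE / 2 term rounds to nearest, and the fallthru edge receives the remainder so the two counts sum back to the block's count. A worked sketch of the arithmetic:

  #include <stdio.h>

  #define REG_BR_PROB_BASE 10000     /* GCC's fixed-point probability scale */

  int
  main (void)
  {
    long long count = 7;             /* block executed 7 times */
    int probability = 3333;          /* branch taken ~33.33% of the time */

    /* Same rounding split as compute_outgoing_frequencies: the branch
       edge gets the rounded share, the fallthru edge gets the rest.  */
    long long taken = (count * probability + REG_BR_PROB_BASE / 2)
                      / REG_BR_PROB_BASE;
    long long fallthru = count - taken;

    printf ("taken %lld, fallthru %lld\n", taken, fallthru);   /* 2 and 5 */
    return 0;
  }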
@@ -797,15 +797,13 @@ find_many_sub_basic_blocks (blocks)
TEST_BIT (blocks, i) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);
for (i = 0; i < n_basic_blocks; i++)
{
basic_block bb = BASIC_BLOCK (i);
if (STATE (bb) == BLOCK_TO_SPLIT)
find_bb_boundaries (bb);
}
if (STATE (BASIC_BLOCK (i)) == BLOCK_TO_SPLIT)
find_bb_boundaries (BASIC_BLOCK (i));
for (i = 0; i < n_basic_blocks; i++)
if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
break;
min = max = i;
for (; i < n_basic_blocks; i++)
if (STATE (BASIC_BLOCK (i)) != BLOCK_ORIGINAL)
@@ -834,8 +832,10 @@ find_many_sub_basic_blocks (blocks)
b->frequency += EDGE_FREQUENCY (e);
}
}
compute_outgoing_frequencies (b);
}
for (i = 0; i < n_basic_blocks; i++)
SET_STATE (BASIC_BLOCK (i), 0);
}
@@ -876,6 +876,7 @@ find_sub_basic_blocks (bb)
b->frequency += EDGE_FREQUENCY (e);
}
}
compute_outgoing_frequencies (b);
}
}
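
The STATE/SET_STATE macros reformatted above encode a small enum in each block's otherwise-unused void * aux field by casting through size_t, letting find_many_sub_basic_blocks tag blocks without allocating anything. A standalone sketch of the trick, with a hypothetical struct standing in for basic_block:

  #include <stddef.h>
  #include <stdio.h>

  struct block { void *aux; };     /* stand-in for GCC's basic_block */

  enum state {BLOCK_NEW = 0, BLOCK_ORIGINAL, BLOCK_TO_SPLIT};

  /* Same casts as the macros in cfgbuild.c: an integer rides in the
     pointer-sized aux slot, no allocation required.  */
  #define STATE(BB) ((enum state) (size_t) (BB)->aux)
  #define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

  int
  main (void)
  {
    struct block bb = { 0 };

    SET_STATE (&bb, BLOCK_TO_SPLIT);
    if (STATE (&bb) == BLOCK_TO_SPLIT)
      printf ("block marked for splitting\n");

    SET_STATE (&bb, 0);            /* clear when done, as the pass does */
    return 0;
  }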