basic-block.h (last_basic_block): Defined as synonym for n_basic_blocks.

* basic-block.h (last_basic_block): Defined as synonym for
	n_basic_blocks.
	* cfganal.c (mark_dfs_back_edges, flow_reverse_top_sort_order_compute,
	flow_depth_first_order_compute, flow_preorder_transversal_compute,
	flow_dfs_compute_reverse_init): Replaced relevant occurrences of
	n_basic_blocks with last_basic_block.
	* cfgbuild.c (make_edges): Likewise.
	* cfgloop.c (flow_loop_scan, flow_loops_find): Likewise.
	* cfgrtl.c (verify_flow_info, purge_all_dead_edges): Likewise.
	* combine.c (combine_instructions): Likewise.
	* df.c (df_alloc, df_analyse_1, df_analyse, iterative_dataflow_sbitmap,
	iterative_dataflow_bitmap): Likewise.
	* dominance.c (init_dom_info, calc_dfs_tree_nonrec, calc_dfs_tree,
	calc_idoms, idoms_to_doms): Likewise.
	* flow.c (update_life_info_in_dirty_blocks, free_basic_block_vars):
	Likewise.
	* gcse.c (gcse_main, alloc_gcse_mem, compute_local_properties,
	compute_hash_table, expr_reaches_here_p, one_classic_gcse_pass,
	one_cprop_pass, compute_pre_data, pre_expr_reaches_here_p,
	one_pre_gcse_pass, compute_transpout, delete_null_pointer_checks_1,
	delete_null_pointer_checks, compute_code_hoist_vbeinout,
	hoist_expr_reaches_here_p, hoist_code, one_code_hoisting_pass,
	compute_store_table, build_store_vectors): Likewise.
	* haifa-sched.c (sched_init): Likewise.
	* ifcvt.c (if_convert): Likewise.
	* lcm.c (compute_antinout_edge, compute_laterin, compute_insert_delete,
	pre_edge_lcm, compute_available, compute_nearerout,
	compute_rev_insert_delete, pre_edge_rev_lcm, optimize_mode_switching):
	Likewise.
	* predict.c (estimate_probability, process_note_prediction,
	note_prediction_to_br_prob): Likewise.
	* profile.c (GCOV_INDEX_TO_BB, BB_TO_GCOV_INDEX): Likewise.
	* recog.c (split_all_insns, peephole2_optimize): Likewise.
	* regrename.c (copyprop_hardreg_forward): Likewise.
	* resource.c (init_resource_info): Likewise.
	* sched-rgn.c (build_control_flow, find_rgns, compute_trg_info,
	init_regions, schedule_insns): Likewise.
	* ssa-ccp.c (ssa_const_prop): Likewise.
	* ssa-dce.c (ssa_eliminate_dead_code): Likewise.
	* ssa.c (compute_dominance_frontiers,
	compute_iterated_dominance_frontiers, convert_to_ssa): Likewise.

	* df.c (df_refs_unlink): Fix FOR_EACH_BB usage (in #if 0'ed code).
	* gcse.c (alloc_rd_mem, alloc_avail_expr_mem): Use n_blocks for vector
	sizes consistently.

From-SVN: r53924
Zdenek Dvorak, 2002-05-27 15:45:44 +02:00
parent 71d2c5bd9b
commit d55bc08175
23 changed files with 269 additions and 218 deletions

gcc/ChangeLog

@@ -1,3 +1,51 @@
2002-05-27 Zdenek Dvorak <rakdver@atrey.karlin.mff.cuni.cz>
* basic-block.h (last_basic_block): Defined as synonym for
n_basic_blocks.
* cfganal.c (mark_dfs_back_edges, flow_reverse_top_sort_order_compute,
flow_depth_first_order_compute, flow_preorder_transversal_compute,
flow_dfs_compute_reverse_init): Replaced relevant occurrences of
n_basic_blocks with last_basic_block.
* cfgbuild.c (make_edges): Likewise.
* cfgloop.c (flow_loop_scan, flow_loops_find): Likewise.
* cfgrtl.c (verify_flow_info, purge_all_dead_edges): Likewise.
* combine.c (combine_instructions): Likewise.
* df.c (df_alloc, df_analyse_1, df_analyse, iterative_dataflow_sbitmap,
iterative_dataflow_bitmap): Likewise.
* dominance.c (init_dom_info, calc_dfs_tree_nonrec, calc_dfs_tree,
calc_idoms, idoms_to_doms): Likewise.
* flow.c (update_life_info_in_dirty_blocks, free_basic_block_vars):
Likewise.
* gcse.c (gcse_main, alloc_gcse_mem, compute_local_properties,
compute_hash_table, expr_reaches_here_p, one_classic_gcse_pass,
one_cprop_pass, compute_pre_data, pre_expr_reaches_here_p,
one_pre_gcse_pass, compute_transpout, delete_null_pointer_checks_1,
delete_null_pointer_checks, compute_code_hoist_vbeinout,
hoist_expr_reaches_here_p, hoist_code, one_code_hoisting_pass,
compute_store_table, build_store_vectors): Likewise.
* haifa-sched.c (sched_init): Likewise.
* ifcvt.c (if_convert): Likewise.
* lcm.c (compute_antinout_edge, compute_laterin, compute_insert_delete,
pre_edge_lcm, compute_available, compute_nearerout,
compute_rev_insert_delete, pre_edge_rev_lcm, optimize_mode_switching):
Likewise.
* predict.c (estimate_probability, process_note_prediction,
note_prediction_to_br_prob): Likewise.
* profile.c (GCOV_INDEX_TO_BB, BB_TO_GCOV_INDEX): Likewise.
* recog.c (split_all_insns, peephole2_optimize): Likewise.
* regrename.c (copyprop_hardreg_forward): Likewise.
* resource.c (init_resource_info): Likewise.
* sched-rgn.c (build_control_flow, find_rgns, compute_trg_info,
init_regions, schedule_insns): Likewise.
* ssa-ccp.c (ssa_const_prop): Likewise.
* ssa-dce.c (ssa_eliminate_dead_code): Likewise.
* ssa.c (compute_dominance_frontiers,
compute_iterated_dominance_frontiers, convert_to_ssa): Likewise.
* df.c (df_refs_unlink): Fix FOR_EACH_BB usage (in #if 0'ed code).
* gcse.c (alloc_rd_mem, alloc_avail_expr_mem): Use n_blocks for vector
sizes consistently.
Mon May 27 14:28:12 CEST 2002 Jan Hubicka <jh@suse.cz>
* basic-block.h (can_hoist_p, hoist_insn_after, hoist_insn_to_edge):

gcc/basic-block.h

@@ -233,6 +233,10 @@ typedef struct basic_block_def {
extern int n_basic_blocks;
/* First free basic block number. */
#define last_basic_block n_basic_blocks
/* Number of edges in the current function. */
extern int n_edges;
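Note: the new name matters mainly for sizing. Anything indexed by bb->index should be allocated with last_basic_block (an upper bound on block indices), while walks over live blocks use FOR_EACH_BB. A minimal sketch of the idiom, not taken from the patch; while last_basic_block is still a synonym for n_basic_blocks the two spellings are equivalent, but the distinct name lets them diverge once block numbering can develop gaps:

  /* Size per-block data by last_basic_block, which bounds every
     bb->index; visit only live blocks with FOR_EACH_BB.  */
  sbitmap visited = sbitmap_alloc (last_basic_block);
  basic_block bb;

  sbitmap_zero (visited);
  FOR_EACH_BB (bb)
    SET_BIT (visited, bb->index);
  sbitmap_free (visited);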

gcc/cfganal.c

@@ -120,15 +120,15 @@ mark_dfs_back_edges ()
bool found = false;
/* Allocate the preorder and postorder number arrays. */
pre = (int *) xcalloc (n_basic_blocks, sizeof (int));
post = (int *) xcalloc (n_basic_blocks, sizeof (int));
pre = (int *) xcalloc (last_basic_block, sizeof (int));
post = (int *) xcalloc (last_basic_block, sizeof (int));
/* Allocate stack for back-tracking up CFG. */
stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
visited = sbitmap_alloc (n_basic_blocks);
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
sbitmap_zero (visited);
@@ -741,7 +741,7 @@ flow_reverse_top_sort_order_compute (rts_order)
sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
visited = sbitmap_alloc (n_basic_blocks);
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
sbitmap_zero (visited);
@@ -812,7 +812,7 @@ flow_depth_first_order_compute (dfs_order, rc_order)
sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
visited = sbitmap_alloc (n_basic_blocks);
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
sbitmap_zero (visited);
@@ -918,7 +918,7 @@ flow_preorder_transversal_compute (pot_order)
sp = 0;
/* Allocate the tree. */
dfst = (struct dfst_node *) xcalloc (n_basic_blocks,
dfst = (struct dfst_node *) xcalloc (last_basic_block,
sizeof (struct dfst_node));
FOR_EACH_BB (bb)
@@ -935,7 +935,7 @@ flow_preorder_transversal_compute (pot_order)
}
/* Allocate bitmap to track nodes that have been visited. */
visited = sbitmap_alloc (n_basic_blocks);
visited = sbitmap_alloc (last_basic_block);
/* None of the nodes in the CFG have been visited yet. */
sbitmap_zero (visited);
@@ -1002,7 +1002,7 @@ flow_preorder_transversal_compute (pot_order)
/* Free the tree. */
for (i = 0; i < n_basic_blocks; i++)
for (i = 0; i < last_basic_block; i++)
if (dfst[i].node)
free (dfst[i].node);
@@ -1049,7 +1049,7 @@ flow_dfs_compute_reverse_init (data)
data->sp = 0;
/* Allocate bitmap to track nodes that have been visited. */
data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
/* None of the nodes in the CFG have been visited yet. */
sbitmap_zero (data->visited_blocks);

gcc/cfgbuild.c

@@ -295,8 +295,8 @@ make_edges (label_value_list, min, max, update_p)
amount of time searching the edge lists for duplicates. */
if (forced_labels || label_value_list)
{
edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
sbitmap_vector_zero (edge_cache, n_basic_blocks);
edge_cache = sbitmap_vector_alloc (last_basic_block, last_basic_block);
sbitmap_vector_zero (edge_cache, last_basic_block);
if (update_p)
FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)

gcc/cfgloop.c

@@ -600,7 +600,7 @@ flow_loop_scan (loops, loop, flags)
/* Determine which loop nodes dominate all the exits
of the loop. */
loop->exits_doms = sbitmap_alloc (n_basic_blocks);
loop->exits_doms = sbitmap_alloc (last_basic_block);
sbitmap_copy (loop->exits_doms, loop->nodes);
for (j = 0; j < loop->num_exits; j++)
sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
@@ -663,7 +663,7 @@ flow_loops_find (loops, flags)
rc_order = NULL;
/* Compute the dominators. */
dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
dom = sbitmap_vector_alloc (last_basic_block, last_basic_block);
calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
/* Count the number of loop edges (back edges). This should be the
@@ -705,10 +705,10 @@ flow_loops_find (loops, flags)
loops->array
= (struct loop *) xcalloc (num_loops, sizeof (struct loop));
headers = sbitmap_alloc (n_basic_blocks);
headers = sbitmap_alloc (last_basic_block);
sbitmap_zero (headers);
loops->shared_headers = sbitmap_alloc (n_basic_blocks);
loops->shared_headers = sbitmap_alloc (last_basic_block);
sbitmap_zero (loops->shared_headers);
/* Find and record information about all the natural loops
@@ -760,7 +760,7 @@ flow_loops_find (loops, flags)
SET_BIT (headers, loop->header->index);
/* Find nodes contained within the loop. */
loop->nodes = sbitmap_alloc (n_basic_blocks);
loop->nodes = sbitmap_alloc (last_basic_block);
loop->num_nodes
= flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);

gcc/cfgrtl.c

@@ -1712,9 +1712,9 @@ verify_flow_info ()
basic_block bb, last_bb_seen;
bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
last_visited = (basic_block *) xcalloc (last_basic_block + 2,
sizeof (basic_block));
edge_checksum = (size_t *) xcalloc (n_basic_blocks + 2, sizeof (size_t));
edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
/* Check bb chain & numbers. */
last_bb_seen = ENTRY_BLOCK_PTR;
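Note: the "+ 2" reserves slots for the ENTRY and EXIT pseudo-blocks, whose indices are negative. A hedged sketch of the offset convention, assuming the usual definitions ENTRY_BLOCK == -1 and EXIT_BLOCK == -2:

  /* Adding 2 to a block index maps real and artificial blocks alike
     into the range [0, last_basic_block + 2).  E is some edge.  */
  size_t *edge_checksum
    = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
  edge_checksum[e->dest->index + 2] += (size_t) e;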
@@ -2317,7 +2317,7 @@ purge_all_dead_edges (update_life_p)
if (update_life_p)
{
blocks = sbitmap_alloc (n_basic_blocks);
blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (blocks);
}

gcc/combine.c

@@ -578,7 +578,7 @@ combine_instructions (f, nregs)
setup_incoming_promotions ();
refresh_blocks = sbitmap_alloc (n_basic_blocks);
refresh_blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (refresh_blocks);
need_refresh = 0;

gcc/df.c

@@ -547,7 +547,7 @@ df_alloc (df, n_regs)
df->uses = xmalloc (df->use_size * sizeof (*df->uses));
df->n_regs = n_regs;
df->n_bbs = n_basic_blocks;
df->n_bbs = last_basic_block;
/* Allocate temporary working array used during local dataflow analysis. */
df->reg_def_last = xmalloc (df->n_regs * sizeof (struct ref *));
@@ -561,7 +561,7 @@ df_alloc (df, n_regs)
df->flags = 0;
df->bbs = xcalloc (df->n_bbs, sizeof (struct bb_info));
df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
df->all_blocks = BITMAP_XMALLOC ();
FOR_EACH_BB (bb)
@@ -2006,9 +2006,9 @@ df_analyse_1 (df, blocks, flags, update)
df->dfs_order = xmalloc (sizeof(int) * n_basic_blocks);
df->rc_order = xmalloc (sizeof(int) * n_basic_blocks);
df->rts_order = xmalloc (sizeof(int) * n_basic_blocks);
df->inverse_dfs_map = xmalloc (sizeof(int) * n_basic_blocks);
df->inverse_rc_map = xmalloc (sizeof(int) * n_basic_blocks);
df->inverse_rts_map = xmalloc (sizeof(int) * n_basic_blocks);
df->inverse_dfs_map = xmalloc (sizeof(int) * last_basic_block);
df->inverse_rc_map = xmalloc (sizeof(int) * last_basic_block);
df->inverse_rts_map = xmalloc (sizeof(int) * last_basic_block);
flow_depth_first_order_compute (df->dfs_order, df->rc_order);
flow_reverse_top_sort_order_compute (df->rts_order);
@@ -2023,10 +2023,10 @@ df_analyse_1 (df, blocks, flags, update)
/* Compute the sets of gens and kills for the defs of each bb. */
df_rd_local_compute (df, df->flags & DF_RD ? blocks : df->all_blocks);
{
bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
@@ -2059,10 +2059,10 @@ df_analyse_1 (df, blocks, flags, update)
uses in each bb. */
df_ru_local_compute (df, df->flags & DF_RU ? blocks : df->all_blocks);
{
bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *gen = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *kill = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
@@ -2098,10 +2098,10 @@ df_analyse_1 (df, blocks, flags, update)
/* Compute the sets of defs and uses of live variables. */
df_lr_local_compute (df, df->flags & DF_LR ? blocks : df->all_blocks);
{
bitmap *in = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *out = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *use = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *def = xmalloc (sizeof (bitmap) * n_basic_blocks);
bitmap *in = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
FOR_EACH_BB (bb)
{
in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
@@ -2292,7 +2292,7 @@ df_analyse (df, blocks, flags)
/* We could deal with additional basic blocks being created by
rescanning everything again. */
if (df->n_bbs && df->n_bbs != (unsigned int)n_basic_blocks)
if (df->n_bbs && df->n_bbs != (unsigned int) last_basic_block)
abort ();
update = df_modified_p (df, blocks);
@@ -2402,10 +2402,8 @@ df_refs_unlink (df, blocks)
}
else
{
FOR_EACH_BB (bb,
{
FOR_EACH_BB (bb)
df_bb_refs_unlink (df, bb);
});
}
}
#endif
@@ -3846,8 +3844,8 @@ iterative_dataflow_sbitmap (in, out, gen, kill, blocks,
fibheap_t worklist;
basic_block bb;
sbitmap visited, pending;
pending = sbitmap_alloc (n_basic_blocks);
visited = sbitmap_alloc (n_basic_blocks);
pending = sbitmap_alloc (last_basic_block);
visited = sbitmap_alloc (last_basic_block);
sbitmap_zero (pending);
sbitmap_zero (visited);
worklist = fibheap_new ();
@@ -3905,8 +3903,8 @@ iterative_dataflow_bitmap (in, out, gen, kill, blocks,
fibheap_t worklist;
basic_block bb;
sbitmap visited, pending;
pending = sbitmap_alloc (n_basic_blocks);
visited = sbitmap_alloc (n_basic_blocks);
pending = sbitmap_alloc (last_basic_block);
visited = sbitmap_alloc (last_basic_block);
sbitmap_zero (pending);
sbitmap_zero (visited);
worklist = fibheap_new ();
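Note: pending and visited are membership bitmaps for the worklist, which is why they are sized by last_basic_block. A loose sketch of the driver loop; what the heap actually stores and the transfer step are simplified assumptions here, not the patch's code:

  basic_block bb;
  while (!fibheap_empty (worklist))
    {
      bb = (basic_block) fibheap_extract_min (worklist);
      RESET_BIT (pending, bb->index);
      /* Recompute OUT for BB here; if it changed, re-queue any
         successor whose bit in PENDING is not yet set.  */
    }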

gcc/dominance.c

@@ -45,7 +45,7 @@
number of the corresponding basic block. Please note, that we include the
artificial ENTRY_BLOCK (or EXIT_BLOCK in the post-dom case) in our lists to
support multiple entry points. As it has no real basic block index we use
'n_basic_blocks' for that. Its dfs number is of course 1. */
'last_basic_block' for that. Its dfs number is of course 1. */
/* Type of Basic Block aka. TBB */
typedef unsigned int TBB;
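Note: dfs_order is allocated with last_basic_block + 1 entries (see the init_dom_info hunk below); the spare slot stands in for the artificial block. A sketch of the lookup pattern, using names from the surrounding hunks:

  /* Real blocks are looked up by bb->index; the artificial ENTRY_BLOCK
     (or EXIT_BLOCK in the post-dominator case) uses the spare slot at
     index last_basic_block.  */
  TBB my_i = (bb == en_block
              ? di->dfs_order[last_basic_block]
              : di->dfs_order[bb->index]);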
@@ -155,7 +155,7 @@ init_dom_info (di)
init_ar (di->set_size, unsigned int, num, 1);
init_ar (di->set_child, TBB, num, 0);
init_ar (di->dfs_order, TBB, (unsigned int) n_basic_blocks + 1, 0);
init_ar (di->dfs_order, TBB, (unsigned int) last_basic_block + 1, 0);
init_ar (di->dfs_to_bb, basic_block, num, 0);
di->dfsnum = 1;
@@ -271,7 +271,7 @@ calc_dfs_tree_nonrec (di, bb, reverse)
if (bb != en_block)
my_i = di->dfs_order[bb->index];
else
my_i = di->dfs_order[n_basic_blocks];
my_i = di->dfs_order[last_basic_block];
child_i = di->dfs_order[bn->index] = di->dfsnum++;
di->dfs_to_bb[child_i] = bn;
di->dfs_parent[child_i] = my_i;
@@ -314,7 +314,7 @@ calc_dfs_tree (di, reverse)
{
/* The first block is the ENTRY_BLOCK (or EXIT_BLOCK if REVERSE). */
basic_block begin = reverse ? EXIT_BLOCK_PTR : ENTRY_BLOCK_PTR;
di->dfs_order[n_basic_blocks] = di->dfsnum;
di->dfs_order[last_basic_block] = di->dfsnum;
di->dfs_to_bb[di->dfsnum] = begin;
di->dfsnum++;
@@ -493,7 +493,7 @@ calc_idoms (di, reverse)
e_next = e->pred_next;
}
if (b == en_block)
k1 = di->dfs_order[n_basic_blocks];
k1 = di->dfs_order[last_basic_block];
else
k1 = di->dfs_order[b->index];
@@ -541,10 +541,10 @@ idoms_to_doms (di, dominators)
{
TBB i, e_index;
int bb, bb_idom;
sbitmap_vector_zero (dominators, n_basic_blocks);
sbitmap_vector_zero (dominators, last_basic_block);
/* We have to be careful, to not include the ENTRY_BLOCK or EXIT_BLOCK
in the list of (post)-doms, so remember that in e_index. */
e_index = di->dfs_order[n_basic_blocks];
e_index = di->dfs_order[last_basic_block];
for (i = 1; i <= di->nodes; i++)
{
@@ -576,8 +576,8 @@ idoms_to_doms (di, dominators)
}
/* The main entry point into this module. IDOM is an integer array with room
for n_basic_blocks integers, DOMS is a preallocated sbitmap array having
room for n_basic_blocks^2 bits, and POST is true if the caller wants to
for last_basic_block integers, DOMS is a preallocated sbitmap array having
room for last_basic_block^2 bits, and POST is true if the caller wants to
know post-dominators.
On return IDOM[i] will be the BB->index of the immediate (post) dominator

View File

@@ -761,7 +761,7 @@ update_life_info_in_dirty_blocks (extent, prop_flags)
enum update_life_extent extent;
int prop_flags;
{
sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
sbitmap update_life_blocks = sbitmap_alloc (last_basic_block);
int n = 0;
basic_block bb;
int retval = 0;
@@ -797,6 +797,7 @@ free_basic_block_vars (keep_head_end_p)
VARRAY_FREE (basic_block_info);
}
n_basic_blocks = 0;
last_basic_block = 0;
ENTRY_BLOCK_PTR->aux = NULL;
ENTRY_BLOCK_PTR->global_live_at_end = NULL;

gcc/gcse.c

@@ -834,11 +834,11 @@ gcse_main (f, file)
{
free_modify_mem_tables ();
modify_mem_list
= (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
= (rtx *) gmalloc (last_basic_block * sizeof (rtx));
canon_modify_mem_list
= (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
= (rtx *) gmalloc (last_basic_block * sizeof (rtx));
memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
orig_bb_count = n_basic_blocks;
}
free_reg_set_mem ();
@@ -1019,14 +1019,14 @@ alloc_gcse_mem (f)
reg_set_bitmap = BITMAP_XMALLOC ();
/* Allocate vars to track sets of regs, memory per block. */
reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
max_gcse_regno);
/* Allocate array to keep a list of insns which modify memory in each
basic block. */
modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx));
memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx));
modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
modify_mem_list_set = BITMAP_XMALLOC ();
canon_modify_mem_list_set = BITMAP_XMALLOC ();
}
@@ -1132,15 +1132,15 @@ compute_local_properties (transp, comp, antloc, setp)
if (transp)
{
if (setp)
sbitmap_vector_zero (transp, n_basic_blocks);
sbitmap_vector_zero (transp, last_basic_block);
else
sbitmap_vector_ones (transp, n_basic_blocks);
sbitmap_vector_ones (transp, last_basic_block);
}
if (comp)
sbitmap_vector_zero (comp, n_basic_blocks);
sbitmap_vector_zero (comp, last_basic_block);
if (antloc)
sbitmap_vector_zero (antloc, n_basic_blocks);
sbitmap_vector_zero (antloc, last_basic_block);
/* We use the same code for cprop, pre and hoisting. For cprop
we care about the set hash table, for pre and hoisting we
@@ -2495,7 +2495,7 @@ compute_hash_table (set_p)
registers are set in which blocks.
??? This isn't needed during const/copy propagation, but it's cheap to
compute. Later. */
sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
sbitmap_vector_zero (reg_set_in_block, last_basic_block);
/* re-Cache any INSN_LIST nodes we have allocated. */
clear_modify_mem_tables ();
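Note: the next two hunks carry the second half of the patch ("use n_blocks for vector sizes consistently"). The allocators take the block count as a parameter, so zeroing their vectors with the global n_basic_blocks could disagree with the size actually allocated now that callers pass last_basic_block. A sketch of the invariant, with hypothetical names:

  static sbitmap *example_vec;  /* hypothetical */

  static void
  alloc_example_mem (n_blocks, n_exprs)
       int n_blocks, n_exprs;
  {
    /* Allocate and zero with the same caller-supplied count.  */
    example_vec = sbitmap_vector_alloc (n_blocks, n_exprs);
    sbitmap_vector_zero (example_vec, n_blocks);  /* not n_basic_blocks */
  }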
@@ -2940,16 +2940,16 @@ alloc_rd_mem (n_blocks, n_insns)
int n_blocks, n_insns;
{
rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
sbitmap_vector_zero (rd_kill, n_basic_blocks);
sbitmap_vector_zero (rd_kill, n_blocks);
rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
sbitmap_vector_zero (rd_gen, n_basic_blocks);
sbitmap_vector_zero (rd_gen, n_blocks);
reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
sbitmap_vector_zero (reaching_defs, n_basic_blocks);
sbitmap_vector_zero (reaching_defs, n_blocks);
rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
sbitmap_vector_zero (rd_out, n_basic_blocks);
sbitmap_vector_zero (rd_out, n_blocks);
}
/* Free reaching def variables. */
@@ -3071,16 +3071,16 @@ alloc_avail_expr_mem (n_blocks, n_exprs)
int n_blocks, n_exprs;
{
ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
sbitmap_vector_zero (ae_kill, n_basic_blocks);
sbitmap_vector_zero (ae_kill, n_blocks);
ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
sbitmap_vector_zero (ae_gen, n_basic_blocks);
sbitmap_vector_zero (ae_gen, n_blocks);
ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
sbitmap_vector_zero (ae_in, n_basic_blocks);
sbitmap_vector_zero (ae_in, n_blocks);
ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
sbitmap_vector_zero (ae_out, n_basic_blocks);
sbitmap_vector_zero (ae_out, n_blocks);
}
static void
@@ -3285,7 +3285,7 @@ expr_reaches_here_p (occr, expr, bb, check_self_loop)
int check_self_loop;
{
int rval;
char *visited = (char *) xcalloc (n_basic_blocks, 1);
char *visited = (char *) xcalloc (last_basic_block, 1);
rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
@@ -3675,7 +3675,7 @@ one_classic_gcse_pass (pass)
gcse_create_count = 0;
alloc_expr_hash_table (max_cuid);
alloc_rd_mem (n_basic_blocks, max_cuid);
alloc_rd_mem (last_basic_block, max_cuid);
compute_expr_hash_table ();
if (gcse_file)
dump_hash_table (gcse_file, "Expression", expr_hash_table,
@@ -3685,7 +3685,7 @@ one_classic_gcse_pass (pass)
{
compute_kill_rd ();
compute_rd ();
alloc_avail_expr_mem (n_basic_blocks, n_exprs);
alloc_avail_expr_mem (last_basic_block, n_exprs);
compute_ae_gen ();
compute_ae_kill (ae_gen, ae_kill);
compute_available (ae_gen, ae_kill, ae_out, ae_in);
@@ -4358,7 +4358,7 @@ one_cprop_pass (pass, alter_jumps)
n_sets);
if (n_sets > 0)
{
alloc_cprop_mem (n_basic_blocks, n_sets);
alloc_cprop_mem (last_basic_block, n_sets);
compute_cprop_data ();
changed = cprop (alter_jumps);
free_cprop_mem ();
@@ -4472,7 +4472,7 @@ compute_pre_data ()
unsigned int ui;
compute_local_properties (transp, comp, antloc, 0);
sbitmap_vector_zero (ae_kill, n_basic_blocks);
sbitmap_vector_zero (ae_kill, last_basic_block);
/* Collect expressions which might trap. */
trapping_expr = sbitmap_alloc (n_exprs);
@@ -4591,7 +4591,7 @@ pre_expr_reaches_here_p (occr_bb, expr, bb)
basic_block bb;
{
int rval;
char *visited = (char *) xcalloc (n_basic_blocks, 1);
char *visited = (char *) xcalloc (last_basic_block, 1);
rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
@@ -5111,7 +5111,7 @@ one_pre_gcse_pass (pass)
if (n_exprs > 0)
{
alloc_pre_mem (n_basic_blocks, n_exprs);
alloc_pre_mem (last_basic_block, n_exprs);
compute_pre_data ();
changed |= pre_gcse ();
free_edge_list (edge_list);
@@ -5199,7 +5199,7 @@ compute_transpout ()
unsigned int i;
struct expr *expr;
sbitmap_vector_ones (transpout, n_basic_blocks);
sbitmap_vector_ones (transpout, last_basic_block);
FOR_EACH_BB (bb)
{
@@ -5281,8 +5281,8 @@ delete_null_pointer_checks_1 (block_reg, nonnull_avin,
Note that a register can have both properties in a single block. That
indicates that it's killed, then later in the block a new value is
computed. */
sbitmap_vector_zero (nonnull_local, n_basic_blocks);
sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
sbitmap_vector_zero (nonnull_local, last_basic_block);
sbitmap_vector_zero (nonnull_killed, last_basic_block);
FOR_EACH_BB (current_block)
{
@@ -5459,18 +5459,18 @@ delete_null_pointer_checks (f)
/* We need four bitmaps, each with a bit for each register in each
basic block. */
max_reg = max_reg_num ();
regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);
regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
/* Allocate bitmaps to hold local and global properties. */
npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
/* Go through the basic blocks, seeing whether or not each block
ends with a conditional branch whose condition is a comparison
against zero. Record the register compared in BLOCK_REG. */
block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
FOR_EACH_BB (bb)
{
rtx last_insn = bb->end;
@@ -5586,8 +5586,8 @@ compute_code_hoist_vbeinout ()
int changed, passes;
basic_block bb;
sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
sbitmap_vector_zero (hoist_vbein, n_basic_blocks);
sbitmap_vector_zero (hoist_vbeout, last_basic_block);
sbitmap_vector_zero (hoist_vbein, last_basic_block);
passes = 0;
changed = 1;
@@ -5653,7 +5653,7 @@ hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
if (visited == NULL)
{
visited_allocated_locally = 1;
visited = xcalloc (n_basic_blocks, 1);
visited = xcalloc (last_basic_block, 1);
}
for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
@@ -5696,7 +5696,7 @@ hoist_code ()
struct expr **index_map;
struct expr *expr;
sbitmap_vector_zero (hoist_exprs, n_basic_blocks);
sbitmap_vector_zero (hoist_exprs, last_basic_block);
/* Compute a mapping from expression number (`bitmap_index') to
hash table entry. */
@@ -5871,7 +5871,7 @@ one_code_hoisting_pass ()
if (n_exprs > 0)
{
alloc_code_hoist_mem (n_basic_blocks, n_exprs);
alloc_code_hoist_mem (last_basic_block, n_exprs);
compute_code_hoist_data ();
hoist_code ();
free_code_hoist_mem ();
@@ -6453,9 +6453,9 @@ compute_store_table ()
max_gcse_regno = max_reg_num ();
reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
max_gcse_regno);
sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
sbitmap_vector_zero (reg_set_in_block, last_basic_block);
pre_ldst_mems = 0;
/* Find all the stores we care about. */
@@ -6654,11 +6654,11 @@ build_store_vectors ()
/* Build the gen_vector. This is any store in the table which is not killed
by aliasing later in its block. */
ae_gen = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
sbitmap_vector_zero (ae_gen, n_basic_blocks);
ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
sbitmap_vector_zero (ae_gen, last_basic_block);
st_antloc = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
sbitmap_vector_zero (st_antloc, n_basic_blocks);
st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
sbitmap_vector_zero (st_antloc, last_basic_block);
for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
{
@@ -6713,11 +6713,11 @@ build_store_vectors ()
free_INSN_LIST_list (&store_list);
}
ae_kill = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
sbitmap_vector_zero (ae_kill, n_basic_blocks);
ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
sbitmap_vector_zero (ae_kill, last_basic_block);
transp = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
sbitmap_vector_zero (transp, n_basic_blocks);
transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
sbitmap_vector_zero (transp, last_basic_block);
for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
FOR_EACH_BB (b)
@@ -6754,10 +6754,10 @@ build_store_vectors ()
{
fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
print_ldst_list (gcse_file);
dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, n_basic_blocks);
dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, n_basic_blocks);
dump_sbitmap_vector (gcse_file, "Transpt", "", transp, n_basic_blocks);
dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, n_basic_blocks);
dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
}
}

gcc/haifa-sched.c

@@ -2384,7 +2384,7 @@ sched_init (dump_file)
{
rtx line;
line_note_head = (rtx *) xcalloc (n_basic_blocks, sizeof (rtx));
line_note_head = (rtx *) xcalloc (last_basic_block, sizeof (rtx));
/* Save-line-note-head:
Determine the line-number at the start of each basic block.

gcc/ifcvt.c

@@ -2700,7 +2700,7 @@ if_convert (x_life_data_ok)
post_dominators = NULL;
if (HAVE_conditional_execution || life_data_ok)
{
post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
post_dominators = sbitmap_vector_alloc (last_basic_block, last_basic_block);
calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
}
if (life_data_ok)

gcc/lcm.c

@@ -119,7 +119,7 @@ compute_antinout_edge (antloc, transp, antin, antout)
/* We want a maximal solution, so make an optimistic initialization of
ANTIN. */
sbitmap_vector_ones (antin, n_basic_blocks);
sbitmap_vector_ones (antin, last_basic_block);
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of ANTIN above. */
@@ -348,10 +348,10 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
/* Computation of insertion and deletion points requires computing LATERIN
for the EXIT block. We allocated an extra entry in the LATERIN array
for just this purpose. */
sbitmap_ones (laterin[n_basic_blocks]);
sbitmap_ones (laterin[last_basic_block]);
for (e = EXIT_BLOCK_PTR->pred; e != NULL; e = e->pred_next)
sbitmap_a_and_b (laterin[n_basic_blocks],
laterin[n_basic_blocks],
sbitmap_a_and_b (laterin[last_basic_block],
laterin[last_basic_block],
later[(size_t) e->aux]);
clear_aux_for_edges ();
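Note: the EXIT block has no row of its own, so LATERIN carries one extra row at index last_basic_block; pre_edge_lcm (below) allocates it with last_basic_block + 1 rows and compute_insert_delete reads that row back. A brief sketch of the convention:

  /* Rows 0 .. last_basic_block - 1 belong to real blocks; the extra
     row belongs to the EXIT block.  */
  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
  /* The EXIT row starts optimistic and is narrowed over EXIT's preds.  */
  sbitmap_ones (laterin[last_basic_block]);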
@@ -377,7 +377,7 @@ compute_insert_delete (edge_list, antloc, later, laterin,
basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
if (b == EXIT_BLOCK_PTR)
sbitmap_difference (insert[x], later[x], laterin[n_basic_blocks]);
sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
else
sbitmap_difference (insert[x], later[x], laterin[b->index]);
}
@@ -413,29 +413,29 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
fprintf (file, "Edge List:\n");
verify_edge_list (file, edge_list);
print_edge_list (file, edge_list);
dump_sbitmap_vector (file, "transp", "", transp, n_basic_blocks);
dump_sbitmap_vector (file, "antloc", "", antloc, n_basic_blocks);
dump_sbitmap_vector (file, "avloc", "", avloc, n_basic_blocks);
dump_sbitmap_vector (file, "kill", "", kill, n_basic_blocks);
dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
dump_sbitmap_vector (file, "antloc", "", antloc, last_basic_block);
dump_sbitmap_vector (file, "avloc", "", avloc, last_basic_block);
dump_sbitmap_vector (file, "kill", "", kill, last_basic_block);
}
#endif
/* Compute global availability. */
avin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
avout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_available (avloc, kill, avout, avin);
sbitmap_vector_free (avin);
/* Compute global anticipatability. */
antin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
antout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_antinout_edge (antloc, transp, antin, antout);
#ifdef LCM_DEBUG_INFO
if (file)
{
dump_sbitmap_vector (file, "antin", "", antin, n_basic_blocks);
dump_sbitmap_vector (file, "antout", "", antout, n_basic_blocks);
dump_sbitmap_vector (file, "antin", "", antin, last_basic_block);
dump_sbitmap_vector (file, "antout", "", antout, last_basic_block);
}
#endif
@@ -455,13 +455,13 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
later = sbitmap_vector_alloc (num_edges, n_exprs);
/* Allocate an extra element for the exit block in the laterin vector. */
laterin = sbitmap_vector_alloc (n_basic_blocks + 1, n_exprs);
laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
compute_laterin (edge_list, earliest, antloc, later, laterin);
#ifdef LCM_DEBUG_INFO
if (file)
{
dump_sbitmap_vector (file, "laterin", "", laterin, n_basic_blocks + 1);
dump_sbitmap_vector (file, "laterin", "", laterin, last_basic_block + 1);
dump_sbitmap_vector (file, "later", "", later, num_edges);
}
#endif
@@ -469,7 +469,7 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
sbitmap_vector_free (earliest);
*insert = sbitmap_vector_alloc (num_edges, n_exprs);
*delete = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
*delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_insert_delete (edge_list, antloc, later, laterin, *insert, *delete);
sbitmap_vector_free (laterin);
@@ -480,7 +480,7 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
{
dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
n_basic_blocks);
last_basic_block);
}
#endif
@@ -505,7 +505,7 @@ compute_available (avloc, kill, avout, avin)
= (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
/* We want a maximal solution. */
sbitmap_vector_ones (avout, n_basic_blocks);
sbitmap_vector_ones (avout, last_basic_block);
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of AVOUT above. */
@@ -689,10 +689,10 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
/* Computation of insertion and deletion points requires computing NEAREROUT
for the ENTRY block. We allocated an extra entry in the NEAREROUT array
for just this purpose. */
sbitmap_ones (nearerout[n_basic_blocks]);
sbitmap_ones (nearerout[last_basic_block]);
for (e = ENTRY_BLOCK_PTR->succ; e != NULL; e = e->succ_next)
sbitmap_a_and_b (nearerout[n_basic_blocks],
nearerout[n_basic_blocks],
sbitmap_a_and_b (nearerout[last_basic_block],
nearerout[last_basic_block],
nearer[(size_t) e->aux]);
clear_aux_for_edges ();
@@ -717,7 +717,7 @@ compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
{
basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
if (b == ENTRY_BLOCK_PTR)
sbitmap_difference (insert[x], nearer[x], nearerout[n_basic_blocks]);
sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
else
sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
}
@@ -749,15 +749,15 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
edge_list = create_edge_list ();
num_edges = NUM_EDGES (edge_list);
st_antin = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, n_exprs);
st_antout = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, n_exprs);
sbitmap_vector_zero (st_antin, n_basic_blocks);
sbitmap_vector_zero (st_antout, n_basic_blocks);
st_antin = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
st_antout = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
sbitmap_vector_zero (st_antin, last_basic_block);
sbitmap_vector_zero (st_antout, last_basic_block);
compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
/* Compute global anticipatability. */
st_avout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
st_avin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_available (st_avloc, kill, st_avout, st_avin);
#ifdef LCM_DEBUG_INFO
@@ -766,20 +766,20 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
fprintf (file, "Edge List:\n");
verify_edge_list (file, edge_list);
print_edge_list (file, edge_list);
dump_sbitmap_vector (file, "transp", "", transp, n_basic_blocks);
dump_sbitmap_vector (file, "st_avloc", "", st_avloc, n_basic_blocks);
dump_sbitmap_vector (file, "st_antloc", "", st_antloc, n_basic_blocks);
dump_sbitmap_vector (file, "st_antin", "", st_antin, n_basic_blocks);
dump_sbitmap_vector (file, "st_antout", "", st_antout, n_basic_blocks);
dump_sbitmap_vector (file, "st_kill", "", kill, n_basic_blocks);
dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
dump_sbitmap_vector (file, "st_avloc", "", st_avloc, last_basic_block);
dump_sbitmap_vector (file, "st_antloc", "", st_antloc, last_basic_block);
dump_sbitmap_vector (file, "st_antin", "", st_antin, last_basic_block);
dump_sbitmap_vector (file, "st_antout", "", st_antout, last_basic_block);
dump_sbitmap_vector (file, "st_kill", "", kill, last_basic_block);
}
#endif
#ifdef LCM_DEBUG_INFO
if (file)
{
dump_sbitmap_vector (file, "st_avout", "", st_avout, n_basic_blocks);
dump_sbitmap_vector (file, "st_avin", "", st_avin, n_basic_blocks);
dump_sbitmap_vector (file, "st_avout", "", st_avout, last_basic_block);
dump_sbitmap_vector (file, "st_avin", "", st_avin, last_basic_block);
}
#endif
@@ -802,14 +802,14 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
nearer = sbitmap_vector_alloc (num_edges, n_exprs);
/* Allocate an extra element for the entry block. */
nearerout = sbitmap_vector_alloc (n_basic_blocks + 1, n_exprs);
nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
#ifdef LCM_DEBUG_INFO
if (file)
{
dump_sbitmap_vector (file, "nearerout", "", nearerout,
n_basic_blocks + 1);
last_basic_block + 1);
dump_sbitmap_vector (file, "nearer", "", nearer, num_edges);
}
#endif
@@ -817,7 +817,7 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
sbitmap_vector_free (farthest);
*insert = sbitmap_vector_alloc (num_edges, n_exprs);
*delete = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
*delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
*insert, *delete);
@@ -829,7 +829,7 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
{
dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
n_basic_blocks);
last_basic_block);
}
#endif
return edge_list;
@@ -1030,8 +1030,8 @@ optimize_mode_switching (file)
clear_bb_flags ();
#ifdef NORMAL_MODE
/* Increment n_basic_blocks before allocating bb_info. */
n_basic_blocks++;
/* Increment last_basic_block before allocating bb_info. */
last_basic_block++;
#endif
for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
@@ -1039,7 +1039,7 @@
{
/* Create the list of segments within each basic block. */
bb_info[n_entities]
= (struct bb_info *) xcalloc (n_basic_blocks, sizeof **bb_info);
= (struct bb_info *) xcalloc (last_basic_block, sizeof **bb_info);
entity_map[n_entities++] = e;
if (num_modes[e] > max_num_modes)
max_num_modes = num_modes[e];
@@ -1047,7 +1047,7 @@
#ifdef NORMAL_MODE
/* Decrement it back in case we return below. */
n_basic_blocks--;
last_basic_block--;
#endif
if (! n_entities)
@@ -1059,20 +1059,20 @@
EXIT_BLOCK isn't optimized away. We do this by incrementing the
basic block count, growing the VARRAY of basic_block_info and
appending the EXIT_BLOCK_PTR to it. */
n_basic_blocks++;
if (VARRAY_SIZE (basic_block_info) < n_basic_blocks)
VARRAY_GROW (basic_block_info, n_basic_blocks);
BASIC_BLOCK (n_basic_blocks - 1) = EXIT_BLOCK_PTR;
EXIT_BLOCK_PTR->index = n_basic_blocks - 1;
last_basic_block++;
if (VARRAY_SIZE (basic_block_info) < last_basic_block)
VARRAY_GROW (basic_block_info, last_basic_block);
BASIC_BLOCK (last_basic_block - 1) = EXIT_BLOCK_PTR;
EXIT_BLOCK_PTR->index = last_basic_block - 1;
#endif
/* Create the bitmap vectors. */
antic = sbitmap_vector_alloc (n_basic_blocks, n_entities);
transp = sbitmap_vector_alloc (n_basic_blocks, n_entities);
comp = sbitmap_vector_alloc (n_basic_blocks, n_entities);
antic = sbitmap_vector_alloc (last_basic_block, n_entities);
transp = sbitmap_vector_alloc (last_basic_block, n_entities);
comp = sbitmap_vector_alloc (last_basic_block, n_entities);
sbitmap_vector_ones (transp, n_basic_blocks);
sbitmap_vector_ones (transp, last_basic_block);
for (j = n_entities - 1; j >= 0; j--)
{
@@ -1169,14 +1169,14 @@
#endif /* NORMAL_MODE */
}
kill = sbitmap_vector_alloc (n_basic_blocks, n_entities);
kill = sbitmap_vector_alloc (last_basic_block, n_entities);
for (i = 0; i < max_num_modes; i++)
{
int current_mode[N_ENTITIES];
/* Set the anticipatable and computing arrays. */
sbitmap_vector_zero (antic, n_basic_blocks);
sbitmap_vector_zero (comp, n_basic_blocks);
sbitmap_vector_zero (antic, last_basic_block);
sbitmap_vector_zero (comp, last_basic_block);
for (j = n_entities - 1; j >= 0; j--)
{
int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
@@ -1301,10 +1301,10 @@ optimize_mode_switching (file)
int no_mode = num_modes[entity_map[j]];
#ifdef NORMAL_MODE
if (bb_info[j][n_basic_blocks].seginfo->mode != no_mode)
if (bb_info[j][last_basic_block].seginfo->mode != no_mode)
{
edge eg;
struct seginfo *ptr = bb_info[j][n_basic_blocks].seginfo;
struct seginfo *ptr = bb_info[j][last_basic_block].seginfo;
for (eg = EXIT_BLOCK_PTR->pred; eg; eg = eg->pred_next)
{

gcc/predict.c

@@ -412,8 +412,8 @@ estimate_probability (loops_info)
basic_block bb;
int i;
dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
dominators = sbitmap_vector_alloc (last_basic_block, last_basic_block);
post_dominators = sbitmap_vector_alloc (last_basic_block, last_basic_block);
calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
@@ -756,7 +756,7 @@ process_note_prediction (bb, heads, dominators, post_dominators, pred, flags)
/* Now find the edge that leads to our branch and apply the prediction. */
if (y == n_basic_blocks)
if (y == last_basic_block)
return;
for (e = BASIC_BLOCK (y)->succ; e; e = e->succ_next)
if (e->dest->index >= 0
@@ -841,15 +841,15 @@ note_prediction_to_br_prob ()
add_noreturn_fake_exit_edges ();
connect_infinite_loops_to_exit ();
dominators = xmalloc (sizeof (int) * n_basic_blocks);
memset (dominators, -1, sizeof (int) * n_basic_blocks);
post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
dominators = xmalloc (sizeof (int) * last_basic_block);
memset (dominators, -1, sizeof (int) * last_basic_block);
post_dominators = sbitmap_vector_alloc (last_basic_block, last_basic_block);
calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
calculate_dominance_info (dominators, NULL, CDI_DOMINATORS);
heads = xmalloc (sizeof (int) * n_basic_blocks);
memset (heads, -1, sizeof (int) * n_basic_blocks);
heads[ENTRY_BLOCK_PTR->next_bb->index] = n_basic_blocks;
heads = xmalloc (sizeof (int) * last_basic_block);
memset (heads, -1, sizeof (int) * last_basic_block);
heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;
/* Process all prediction notes. */

gcc/profile.c

@@ -73,11 +73,11 @@ struct bb_info
/* Keep all basic block indexes nonnegative in the gcov output. Index 0
is used for entry block, last block exit block. */
#define GCOV_INDEX_TO_BB(i) ((i) == 0 ? ENTRY_BLOCK_PTR \
: (((i) == n_basic_blocks + 1) \
: (((i) == last_basic_block + 1) \
? EXIT_BLOCK_PTR : BASIC_BLOCK ((i)-1)))
#define BB_TO_GCOV_INDEX(bb) ((bb) == ENTRY_BLOCK_PTR ? 0 \
: ((bb) == EXIT_BLOCK_PTR \
? n_basic_blocks + 1 : (bb)->index + 1))
? last_basic_block + 1 : (bb)->index + 1))
/* Instantiate the profile info structure. */
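Note: a worked instance of the mapping, assuming a function with three real blocks (indices 0..2, so last_basic_block == 3):

  /* GCOV_INDEX_TO_BB (0) -> ENTRY_BLOCK_PTR
     GCOV_INDEX_TO_BB (1) -> BASIC_BLOCK (0)
     GCOV_INDEX_TO_BB (3) -> BASIC_BLOCK (2)
     GCOV_INDEX_TO_BB (4) -> EXIT_BLOCK_PTR  (== last_basic_block + 1)
     BB_TO_GCOV_INDEX (ENTRY_BLOCK_PTR) == 0
     BB_TO_GCOV_INDEX (BASIC_BLOCK (2)) == 3
     BB_TO_GCOV_INDEX (EXIT_BLOCK_PTR)  == 4  */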

gcc/recog.c

@@ -2729,7 +2729,7 @@ split_all_insns (upd_life)
int changed;
basic_block bb;
blocks = sbitmap_alloc (n_basic_blocks);
blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (blocks);
changed = 0;
@@ -3013,7 +3013,7 @@ peephole2_optimize (dump_file)
live = INITIALIZE_REG_SET (rs_heads[i]);
#ifdef HAVE_conditional_execution
blocks = sbitmap_alloc (n_basic_blocks);
blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (blocks);
changed = false;
#else

gcc/regrename.c

@@ -1729,7 +1729,7 @@ copyprop_hardreg_forward ()
need_refresh = false;
all_vd = xmalloc (sizeof (struct value_data) * n_basic_blocks);
all_vd = xmalloc (sizeof (struct value_data) * last_basic_block);
FOR_EACH_BB (bb)
{

gcc/resource.c

@@ -1240,7 +1240,7 @@ init_resource_info (epilogue_insn)
/* Allocate and initialize the tables used by mark_target_live_regs. */
target_hash_table = (struct target_info **)
xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
bb_ticks = (int *) xcalloc (n_basic_blocks, sizeof (int));
bb_ticks = (int *) xcalloc (last_basic_block, sizeof (int));
}
/* Free up the resources allocated to mark_target_live_regs (). This

gcc/sched-rgn.c

@@ -403,8 +403,8 @@ build_control_flow (edge_list)
}
/* ??? We can kill these soon. */
in_edges = (int *) xcalloc (n_basic_blocks, sizeof (int));
out_edges = (int *) xcalloc (n_basic_blocks, sizeof (int));
in_edges = (int *) xcalloc (last_basic_block, sizeof (int));
out_edges = (int *) xcalloc (last_basic_block, sizeof (int));
edge_table = (haifa_edge *) xcalloc (num_edges, sizeof (haifa_edge));
nr_edges = 0;
@@ -661,23 +661,23 @@ find_rgns (edge_list, dom)
STACK, SP and DFS_NR are only used during the first traversal. */
/* Allocate and initialize variables for the first traversal. */
max_hdr = (int *) xmalloc (n_basic_blocks * sizeof (int));
dfs_nr = (int *) xcalloc (n_basic_blocks, sizeof (int));
max_hdr = (int *) xmalloc (last_basic_block * sizeof (int));
dfs_nr = (int *) xcalloc (last_basic_block, sizeof (int));
stack = (int *) xmalloc (nr_edges * sizeof (int));
inner = sbitmap_alloc (n_basic_blocks);
inner = sbitmap_alloc (last_basic_block);
sbitmap_ones (inner);
header = sbitmap_alloc (n_basic_blocks);
header = sbitmap_alloc (last_basic_block);
sbitmap_zero (header);
passed = sbitmap_alloc (nr_edges);
sbitmap_zero (passed);
in_queue = sbitmap_alloc (n_basic_blocks);
in_queue = sbitmap_alloc (last_basic_block);
sbitmap_zero (in_queue);
in_stack = sbitmap_alloc (n_basic_blocks);
in_stack = sbitmap_alloc (last_basic_block);
sbitmap_zero (in_stack);
for (i = 0; i < n_basic_blocks; i++)
@@ -1197,8 +1197,8 @@ compute_trg_info (trg)
add the TO block to the update block list. This list can end
up with a lot of duplicates. We need to weed them out to avoid
overrunning the end of the bblst_table. */
update_blocks = (char *) alloca (n_basic_blocks);
memset (update_blocks, 0, n_basic_blocks);
update_blocks = (char *) alloca (last_basic_block);
memset (update_blocks, 0, last_basic_block);
update_idx = 0;
for (j = 0; j < el.nr_members; j++)
@@ -2890,8 +2890,8 @@ init_regions ()
nr_regions = 0;
rgn_table = (region *) xmalloc ((n_basic_blocks) * sizeof (region));
rgn_bb_table = (int *) xmalloc ((n_basic_blocks) * sizeof (int));
block_to_bb = (int *) xmalloc ((n_basic_blocks) * sizeof (int));
containing_rgn = (int *) xmalloc ((n_basic_blocks) * sizeof (int));
block_to_bb = (int *) xmalloc ((last_basic_block) * sizeof (int));
containing_rgn = (int *) xmalloc ((last_basic_block) * sizeof (int));
/* Compute regions for scheduling. */
if (reload_completed
@@ -2912,7 +2912,7 @@ init_regions ()
sbitmap *dom;
struct edge_list *edge_list;
dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
dom = sbitmap_vector_alloc (last_basic_block, last_basic_block);
/* The scheduler runs after flow; therefore, we can't blindly call
back into find_basic_blocks since doing so could invalidate the
@@ -2953,7 +2953,7 @@ init_regions ()
if (CHECK_DEAD_NOTES)
{
blocks = sbitmap_alloc (n_basic_blocks);
blocks = sbitmap_alloc (last_basic_block);
deaths_in_region = (int *) xmalloc (sizeof (int) * nr_regions);
/* Remove all death notes from the subroutine. */
for (rgn = 0; rgn < nr_regions; rgn++)
@@ -3021,12 +3021,12 @@ schedule_insns (dump_file)
compute_bb_for_insn (get_max_uid ());
any_large_regions = 0;
large_region_blocks = sbitmap_alloc (n_basic_blocks);
large_region_blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (large_region_blocks);
FOR_EACH_BB (bb)
SET_BIT (large_region_blocks, bb->index);
blocks = sbitmap_alloc (n_basic_blocks);
blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (blocks);
/* Update life information. For regions consisting of multiple blocks

gcc/ssa-ccp.c

@@ -1016,7 +1016,7 @@ ssa_const_prop ()
ssa_edges = sbitmap_alloc (VARRAY_SIZE (ssa_definition));
sbitmap_zero (ssa_edges);
executable_blocks = sbitmap_alloc (n_basic_blocks);
executable_blocks = sbitmap_alloc (last_basic_block);
sbitmap_zero (executable_blocks);
executable_edges = sbitmap_alloc (NUM_EDGES (edges));

gcc/ssa-dce.c

@@ -506,7 +506,7 @@ ssa_eliminate_dead_code ()
mark_all_insn_unnecessary ();
VARRAY_RTX_INIT (unprocessed_instructions, 64,
"unprocessed instructions");
cdbte = control_dependent_block_to_edge_map_create (n_basic_blocks);
cdbte = control_dependent_block_to_edge_map_create (last_basic_block);
/* Prepare for use of BLOCK_NUM (). */
connect_infinite_loops_to_exit ();
@@ -514,12 +514,12 @@ ssa_eliminate_dead_code ()
compute_bb_for_insn (max_insn_uid);
/* Compute control dependence. */
pdom = (int *) xmalloc (n_basic_blocks * sizeof (int));
for (i = 0; i < n_basic_blocks; ++i)
pdom = (int *) xmalloc (last_basic_block * sizeof (int));
for (i = 0; i < last_basic_block; ++i)
pdom[i] = INVALID_BLOCK;
calculate_dominance_info (pdom, NULL, CDI_POST_DOMINATORS);
/* Assume there is a path from each node to the exit block. */
for (i = 0; i < n_basic_blocks; ++i)
for (i = 0; i < last_basic_block; ++i)
if (pdom[i] == INVALID_BLOCK)
pdom[i] = EXIT_BLOCK;
el = create_edge_list ();

gcc/ssa.c

@@ -559,7 +559,7 @@ compute_dominance_frontiers (frontiers, idom)
sbitmap *frontiers;
int *idom;
{
sbitmap done = sbitmap_alloc (n_basic_blocks);
sbitmap done = sbitmap_alloc (last_basic_block);
sbitmap_zero (done);
compute_dominance_frontiers_1 (frontiers, idom, 0, done);
@@ -585,7 +585,7 @@ compute_iterated_dominance_frontiers (idfs, frontiers, evals, nregs)
sbitmap worklist;
int reg, passes = 0;
worklist = sbitmap_alloc (n_basic_blocks);
worklist = sbitmap_alloc (last_basic_block);
for (reg = 0; reg < nregs; ++reg)
{
@@ -1150,8 +1150,8 @@ convert_to_ssa ()
dead code. We'll let the SSA optimizers do that. */
life_analysis (get_insns (), NULL, 0);
idom = (int *) alloca (n_basic_blocks * sizeof (int));
memset ((void *) idom, -1, (size_t) n_basic_blocks * sizeof (int));
idom = (int *) alloca (last_basic_block * sizeof (int));
memset ((void *) idom, -1, (size_t) last_basic_block * sizeof (int));
calculate_dominance_info (idom, NULL, CDI_DOMINATORS);
if (rtl_dump_file)
@@ -1164,13 +1164,13 @@ convert_to_ssa ()
/* Compute dominance frontiers. */
dfs = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
dfs = sbitmap_vector_alloc (last_basic_block, last_basic_block);
compute_dominance_frontiers (dfs, idom);
if (rtl_dump_file)
{
dump_sbitmap_vector (rtl_dump_file, ";; Dominance Frontiers:",
"; Basic Block", dfs, n_basic_blocks);
"; Basic Block", dfs, last_basic_block);
fflush (rtl_dump_file);
}
@@ -1178,12 +1178,12 @@ convert_to_ssa ()
ssa_max_reg_num = max_reg_num ();
nregs = ssa_max_reg_num;
evals = sbitmap_vector_alloc (nregs, n_basic_blocks);
evals = sbitmap_vector_alloc (nregs, last_basic_block);
find_evaluations (evals, nregs);
/* Compute the iterated dominance frontier for each register. */
idfs = sbitmap_vector_alloc (nregs, n_basic_blocks);
idfs = sbitmap_vector_alloc (nregs, last_basic_block);
compute_iterated_dominance_frontiers (idfs, dfs, evals, nregs);
if (rtl_dump_file)